import os
import pathlib
import re
import pickle
import numpy as np
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# TensorFlow version check (output below shows this notebook ran with TF 2.7.0)
print(tf.__version__)
# plotting style options
font = {'size': 18}
matplotlib.rc('font', **font)
sns.set_theme()
sns.set(font_scale = 1.2)
# Jupyter magic: render matplotlib figures inline in the notebook
%matplotlib inline
2.7.0
The notebook is structured as follows:
Deliverables:
The problem is to find a function $G$ to predict tool-tip forces from position coordinates: $$ \vec{f} = G(x,y,z,a,b,c) $$ Models like these could be solved using a parametrized dynamical model with Lagrange or Newton-Euler methods assuming rigid body motion. However, instead of doing that, I'm going to approximate $G$ with a neural network.
Here are a few sources I looked at for inspiration:
Before training a model, it is always good to look at the data. Let's have a look at time series plots, histograms, and correlations.
# Working directory plus a timestamped output directory for this run's artifacts.
import time

workdir = pathlib.Path("./")
# timestamp tags saved models, plots, etc. so reruns don't overwrite each other
timestamp = time.strftime("%Y%m%d_%H%M")
output_dir = workdir / 'olsson_solution_{}'.format(timestamp)
output_dir.mkdir(parents=True, exist_ok=True)

# Load each test recording into its own DataFrame.
dataset_filenames = ["Test1", "Test2", "Test4"]
datasets = []
print("loading datasets:")
for name in dataset_filenames:
    csv_path = workdir / str(name + '.csv')
    print("-", csv_path)
    datasets.append(pd.read_csv(csv_path))
loading datasets: - Test1.csv - Test2.csv - Test4.csv
# Transposed head: one row per column, easier to scan with this many columns.
print("First five entries: \n", datasets[0].head(5).T)
First five entries:
0 1 2 3 4
t 1.636580e+09 1.636580e+09 1.636580e+09 1.636580e+09 1.636580e+09
a_enc_1 -4.951100e+00 -4.951100e+00 -4.951100e+00 -4.951100e+00 -4.951100e+00
b_enc_1 1.830000e-02 1.830000e-02 1.830000e-02 1.830000e-02 1.830000e-02
c_enc_1 -7.190000e-02 -7.190000e-02 -7.190000e-02 -7.190000e-02 -7.190000e-02
x_enc_1 2.136337e+02 2.136337e+02 2.136337e+02 2.136337e+02 2.136337e+02
y_enc_1 3.241015e+02 3.241015e+02 3.241015e+02 3.241015e+02 3.241015e+02
z_enc_1 8.953528e+02 8.953528e+02 8.953528e+02 8.953528e+02 8.953528e+02
a_enc_2 -1.549772e+02 -1.549772e+02 -1.549772e+02 -1.549772e+02 -1.549772e+02
b_enc_2 2.023000e-01 2.024000e-01 2.024000e-01 2.024000e-01 2.024000e-01
c_enc_2 -1.798798e+02 -1.798798e+02 -1.798798e+02 -1.798798e+02 -1.798798e+02
x_enc_2 2.232210e+01 2.232040e+01 2.232040e+01 2.232040e+01 2.232040e+01
y_enc_2 7.831761e+02 7.831754e+02 7.831754e+02 7.831754e+02 7.831754e+02
z_enc_2 -7.725771e+02 -7.725771e+02 -7.725771e+02 -7.725771e+02 -7.725771e+02
fx_1 -2.326357e+00 -2.192611e+00 -2.103594e+00 -1.869649e+00 -2.336206e+00
fy_1 9.639795e+00 9.531656e+00 9.776526e+00 9.100982e+00 9.058406e+00
fz_1 -3.264595e+01 -3.307391e+01 -3.143578e+01 -3.171914e+01 -3.232948e+01
fx_2 1.180561e+01 1.169716e+01 1.166217e+01 1.141468e+01 1.122329e+01
fy_2 1.865609e+01 1.846252e+01 1.860119e+01 1.848982e+01 1.795298e+01
fz_2 -1.283101e+01 -1.225022e+01 -1.145559e+01 -1.253816e+01 -1.042543e+01
# Per-column summary statistics (transposed, subset of describe() fields).
print("\ndescribe():\n", datasets[0].describe().T[['count', 'mean', 'std', 'min', 'max']])
describe():
count mean std min max
t 20091.0 1.636590e+09 5800.523780 1.636580e+09 1.636600e+09
a_enc_1 20091.0 -8.924334e+01 7.811876 -9.001034e+01 -4.951004e+00
b_enc_1 20091.0 8.760533e-04 0.002961 -1.604445e-02 2.338158e-02
c_enc_1 20091.0 1.884124e-03 0.007390 -7.199558e-02 2.159353e-02
x_enc_1 20091.0 4.578387e+02 197.669145 8.937531e+01 8.306894e+02
y_enc_1 20091.0 1.965825e+02 103.805836 -1.773652e+00 3.672685e+02
z_enc_1 20091.0 -6.500421e+01 100.980291 -1.767849e+02 8.953528e+02
a_enc_2 20091.0 8.804365e+01 22.349876 -1.783886e+02 1.789519e+02
b_enc_2 20091.0 1.449096e-03 0.029150 -2.199676e+00 1.164570e+00
c_enc_2 20091.0 -4.858466e+01 173.319815 -1.800000e+02 1.800000e+02
x_enc_2 20091.0 4.560382e+02 202.637762 2.231891e+01 8.318730e+02
y_enc_2 20091.0 1.995663e+02 120.025141 -3.800164e+00 7.831773e+02
z_enc_2 20091.0 -7.605120e+01 81.255998 -7.725771e+02 -6.817898e-01
fx_1 20091.0 3.084250e+01 681.262919 -1.919500e+03 1.876367e+03
fy_1 20091.0 9.071373e+01 1192.944410 -1.841587e+03 2.238735e+03
fz_1 20091.0 2.518588e+03 582.206105 -7.799607e+01 3.374799e+03
fx_2 20091.0 -2.917874e+01 441.132354 -1.488688e+03 1.233724e+03
fy_2 20091.0 -1.031784e+02 729.100519 -1.584797e+03 1.303751e+03
fz_2 20091.0 -7.406174e+02 302.882751 -2.315789e+03 -5.323892e+00
# Input features: encoder pose coordinates (x, y, z, a, b, c) per robot arm.
# Outputs: the three force components per arm.
features_1 = ['{}_enc_1'.format(axis) for axis in 'xyzabc']
features_2 = ['{}_enc_2'.format(axis) for axis in 'xyzabc']
outputs_1 = ['fx_1', 'fy_1', 'fz_1']
outputs_2 = ['fx_2', 'fy_2', 'fz_2']
features = features_1 + features_2
outputs = outputs_1 + outputs_2
print('features:', features)
print('outputs:', outputs)
features: ['x_enc_1', 'y_enc_1', 'z_enc_1', 'a_enc_1', 'b_enc_1', 'c_enc_1', 'x_enc_2', 'y_enc_2', 'z_enc_2', 'a_enc_2', 'b_enc_2', 'c_enc_2'] outputs: ['fx_1', 'fy_1', 'fz_1', 'fx_2', 'fy_2', 'fz_2']
`seaborn.pairplot` can be helpful for getting an idea of the correlations between variables.
# Pairwise KDE plots on a small random sample (pairplot on the full data is slow).
labels = [features_1 + outputs_1, features_2 + outputs_2]
for arm, cols in enumerate(labels, start=1):
    fig = plt.figure(figsize=(20, 20))
    sns.pairplot(datasets[0].sample(200)[cols], kind='kde')
    plt.savefig(output_dir / 'pairplot_robot{}.pdf'.format(arm))
<Figure size 1440x1440 with 0 Axes>
<Figure size 1440x1440 with 0 Axes>
Plot correlation matrices
# One correlation heatmap per dataset, per robot arm, side by side in one figure.
sns.set_style("whitegrid")
labels = [features_1 + outputs_1, features_2 + outputs_2]
for arm, cols in enumerate(labels, start=1):
    fig = plt.figure(figsize=(10 * len(datasets), 8))
    for i, ds in enumerate(datasets):
        ax = fig.add_subplot(1, len(datasets), i + 1)
        ax.set_title(dataset_filenames[i], weight='bold').set_fontsize('18')
        sns.heatmap(ds[cols].corr(), annot=True)
    plt.savefig(output_dir / 'corr_robot{}.pdf'.format(arm))
Observations:
# Full correlation matrix across both arms' features and forces for one dataset.
dataset_idx = 0  # Test1
labels = features_1 + features_2 + outputs_1 + outputs_2
corr = datasets[dataset_idx][labels].corr()
fig = plt.figure(figsize=(16, 12))
ax = fig.add_subplot(111)
ax.set_title(dataset_filenames[dataset_idx], weight='bold').set_fontsize('18')
sns.set_style("whitegrid")
sns.heatmap(corr, annot=True)
plt.savefig(output_dir/'corr_{}_both_robots.pdf'.format(dataset_filenames[dataset_idx]))
Observations:
# Time-series overlay of every variable: one panel per variable, one figure per arm,
# all three datasets drawn together.
sns.set_style("ticks")
variables_to_plot = [i + '_enc' for i in 'xyzabc'] + ['fx', 'fy', 'fz']
for robot_idx in range(2):
    fig = plt.figure(figsize=(30, 18))
    for panel, base in enumerate(variables_to_plot):
        col = base + '_{}'.format(robot_idx + 1)
        ax = fig.add_subplot(3, 3, panel + 1)
        for ds, name in zip(datasets, dataset_filenames):
            # shift 't' so every dataset starts at t=0 and the traces overlap
            ax.plot(ds['t'] - min(ds['t']), ds[col], label=name, alpha=0.5)
        ax.set_xlabel('t', y=0.5)
        ax.set_ylabel(col, y=0.5)
        ax.legend(loc=1)
    fig.savefig(output_dir/'1d_plot_variables_vs_time_robot{}.pdf'.format(robot_idx+1))
# Per-variable histograms: one panel per variable, one figure per arm,
# all three datasets overlaid for comparison.
sns.set_style("ticks")
variables_to_plot = [i + '_enc' for i in 'xyzabc'] + ['fx', 'fy', 'fz']
for robot_idx in range(2):
    fig = plt.figure(figsize=(30, 18))
    for i, k in enumerate(variables_to_plot):
        k += '_{}'.format(robot_idx + 1)
        ax = fig.add_subplot(3, 3, i + 1)
        for j in range(len(datasets)):
            ax.hist(datasets[j][k], label=dataset_filenames[j], bins=50, alpha=0.5)
        ax.set_xlabel(k, y=0.5)
        # fixed axis-label typo: 'entires' -> 'entries'
        ax.set_ylabel('# entries', y=0.5)
        ax.legend(loc=1)
    fig.savefig(output_dir/'1d_hist_variables_vs_time_robot{}.pdf'.format(robot_idx+1))
I noticed that the models could more accurately predict forces from motion in datasets not seen during training when adding higher-order derivatives of the position coordinates as additional input features.
First (velocity) and second (acceleration) order derivatives had a notable effect on the performance (more about that in section 4). I also tried to include up to 6th order derivatives (3rd=jerk, 4th=snap, 5th=crackle, 6th=pop) [1]. These had a minor impact but helped in some cases when the arms jerked around a lot.
[1] https://en.wikipedia.org/wiki/Fourth,_fifth,_and_sixth_derivatives_of_position
# differentiate variables in dataframe
def add_gradients(df, keys_to_diff, nth_order=1):
    """Add numerical derivatives of the given columns to ``df`` in place.

    For each key ``k`` in ``keys_to_diff`` a new column named
    ``d<nth_order>_<base>`` is created, where ``<base>`` is ``k`` with any
    ``d<digit>_`` prefix stripped (so differentiating ``d1_x_enc_1`` at
    order 2 yields ``d2_x_enc_1``). Derivatives use ``np.gradient``
    (unit sample spacing).

    Parameters
    ----------
    df : pandas.DataFrame
        Frame to modify in place.
    keys_to_diff : iterable of str
        Names of the columns to differentiate.
    nth_order : int, default 1
        Derivative order used in the new column-name prefix.
    """
    for k in keys_to_diff:
        # raw string: 'd\d_' is an invalid escape sequence in a plain literal
        base = re.sub(r'd\d_', '', k)
        df['d' + str(nth_order) + '_' + base] = np.gradient(df[k])
# Add derivatives of position and Euler angles up to 6th order:
# 1=velocity, 2=acceleration, 3=jerk, 4=snap, 5=crackle, 6=pop
nth_order = 6
features_to_diff = features
for n in range(1, nth_order + 1):
    for ds in datasets:
        add_gradients(ds, features_to_diff, n)
    # the next pass differentiates the columns just created (prefix 'd<n>_')
    features_to_diff = [k for k in datasets[0].keys() if re.search('^d{:d}'.format(n), k)]
# x-axis-only feature/target subsets, used for the 1-D examples below
features_x1 = ['x_enc_1'] + ['d{:d}_x_enc_1'.format(order) for order in range(1, 7)]
features_x2 = ['x_enc_2'] + ['d{:d}_x_enc_2'.format(order) for order in range(1, 7)]
outputs_x1 = ['fx_1']
outputs_x2 = ['fx_2']
def generate_feature_list(arm_idx, nth_order=6):
    """Return feature column names for one arm.

    Pose coordinates (x, y, z, a, b, c) come first, followed by derivative
    columns d1..d<nth_order> for each coordinate, grouped by coordinate.
    """
    names = ['{}_enc_{}'.format(axis, arm_idx) for axis in 'xyzabc']
    for axis in 'xyzabc':
        names.extend('d{}_{}_enc_{}'.format(order, axis, arm_idx)
                     for order in range(1, nth_order + 1))
    return names
# Per-arm feature lists with derivatives up to 6th order
# (velocity, acceleration, jerk, snap, crackle, pop) ...
features_1_nth = generate_feature_list(1, nth_order)
features_2_nth = generate_feature_list(2, nth_order)
# ... and with derivatives up to 2nd order only (velocity, acceleration)
features_1_2nd = generate_feature_list(1, 2)
features_2_2nd = generate_feature_list(2, 2)
# force targets per arm
outputs_1 = ['fx_1', 'fy_1', 'fz_1']
outputs_2 = ['fx_2', 'fy_2', 'fz_2']
# Correlations between x-axis derivatives and the x force, per arm (Test1).
idx = 0  # Test1
labels = [features_x1 + outputs_x1, features_x2 + outputs_x2]
sns.set_style("whitegrid")
for arm, cols in enumerate(labels, start=1):
    fig = plt.figure(figsize=(16, 14))
    ax = fig.add_subplot(111)
    ax.set_title(dataset_filenames[idx], weight='bold').set_fontsize('18')
    sns.heatmap(datasets[idx][cols].corr(), annot=True)
    plt.savefig(output_dir/'corr_derivatives_x_robot{}.pdf'.format(arm))
# Correlations between all features (with derivatives) and forces, per arm (Test1).
idx = 0  # Test1
labels = [features_1_nth + outputs_1, features_2_nth + outputs_2]
sns.set_style("whitegrid")
for arm, cols in enumerate(labels, start=1):
    fig = plt.figure(figsize=(32, 24))
    ax = fig.add_subplot(111)
    ax.set_title(dataset_filenames[idx], weight='bold').set_fontsize('18')
    sns.heatmap(datasets[idx][cols].corr(), annot=True)
    plt.savefig(output_dir/'corr_derivatives_all_robot{}.pdf'.format(arm))
Hold out 'Test2' for testing of model trained on 'Test1' and 'Test4'
# Hold out Test2: train on Test1 + Test4 only.
df1 = datasets[0]  # Test1
df2 = datasets[1]  # Test2 (held out for testing)
df4 = datasets[2]  # Test4
# DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
# pd.concat is the supported equivalent (same row order, same index behavior).
df = pd.concat([df1, df4])
features_nth = features_1_nth + features_2_nth  # all derivatives up to 6th order
features_2nd = features_1_2nd + features_2_2nd  # derivatives up to 2nd order
X = df[features_nth].to_numpy()
Y = df[outputs].to_numpy()
print(X.shape, Y.shape)
(49078, 84) (49078, 6)
Both MinMaxScaler and StandardScaler were tried, the former gave slightly more robust performance.
# Scale inputs and targets to [0, 1]. MinMaxScaler gave slightly more robust
# results than StandardScaler here (import kept for easy switching back).
from sklearn.preprocessing import StandardScaler, MinMaxScaler

scaler_x = MinMaxScaler()
scaler_y = MinMaxScaler()
X_normed = scaler_x.fit_transform(X)
Y_normed = scaler_y.fit_transform(Y)

# sanity check: raw vs. scaled vs. round-tripped values of the first column
print(X[0:5, 0])
print(X_normed[0:5, 0])
print(scaler_x.inverse_transform(X_normed)[0:4, 0])
[213.6337 213.6337 213.6337 213.6337 213.6337] [0.21954808 0.21954808 0.21954808 0.21954808 0.21954808] [213.6337 213.6337 213.6337 213.6337]
Including multiple time steps of the input features and training an RNN to predict forces improved the performance over using a DNN. After some experimentation, I concluded that about 20 timesteps worked pretty well.
def split_sequences(X, Y, n_steps):
    """Slice (X, Y) into overlapping windows for an RNN.

    Returns ``(X_seq, Y_seq)`` where ``X_seq[i]`` holds ``n_steps``
    consecutive rows of ``X`` starting at ``i``, and ``Y_seq[i]`` is the
    target row aligned with the window's last time step.
    """
    windows, targets = [], []
    for start in range(len(X) - n_steps + 1):
        stop = start + n_steps
        windows.append(X[start:stop, :])
        targets.append(Y[stop - 1, :])
    return (np.array(windows), np.array(targets))
# Build 20-step input windows for the RNN models below.
n_steps = 20
X_seq, Y_seq = split_sequences(X_normed, Y_normed, n_steps)
print(X_seq.shape, Y_seq.shape)
(49059, 20, 84) (49059, 6)
from sklearn.model_selection import train_test_split

# shuffle=False would instead hold out the last events for testing
shuffle = True
# 70-10-20 train-validation-test split
train_frac = 0.7
val_frac = 0.1
# the remaining 30% is split so validation is 10% and test 20% of the total
holdout_test_frac = 1.0 - val_frac / (1.0 - train_frac)

# flat samples for the DNN
X_train, X_val_test, Y_train, Y_val_test = train_test_split(
    X_normed, Y_normed, train_size=train_frac, shuffle=shuffle)
X_val, X_test, Y_val, Y_test = train_test_split(
    X_val_test, Y_val_test, test_size=holdout_test_frac, shuffle=False)
print(X_train.shape, Y_train.shape)
print(X_val.shape, Y_val.shape)
print(X_test.shape, Y_test.shape)

# windowed samples for the RNN
X_seq_train, X_seq_val_test, Y_seq_train, Y_seq_val_test = train_test_split(
    X_seq, Y_seq, train_size=train_frac, shuffle=shuffle)
X_seq_val, X_seq_test, Y_seq_val, Y_seq_test = train_test_split(
    X_seq_val_test, Y_seq_val_test, test_size=holdout_test_frac, shuffle=False)
print(X_seq_train.shape, Y_seq_train.shape)
print(X_seq_val.shape, Y_seq_val.shape)
print(X_seq_test.shape, Y_seq_test.shape)
(34354, 84) (34354, 6) (4907, 84) (4907, 6) (9817, 84) (9817, 6) (34341, 20, 84) (34341, 6) (4905, 20, 84) (4905, 6) (9813, 20, 84) (9813, 6)
# sanity check: the feature/output name lists used for column-index lookup below
print(features)
print(features_2nd)
print(outputs)
['x_enc_1', 'y_enc_1', 'z_enc_1', 'a_enc_1', 'b_enc_1', 'c_enc_1', 'x_enc_2', 'y_enc_2', 'z_enc_2', 'a_enc_2', 'b_enc_2', 'c_enc_2'] ['x_enc_1', 'y_enc_1', 'z_enc_1', 'a_enc_1', 'b_enc_1', 'c_enc_1', 'd1_x_enc_1', 'd2_x_enc_1', 'd1_y_enc_1', 'd2_y_enc_1', 'd1_z_enc_1', 'd2_z_enc_1', 'd1_a_enc_1', 'd2_a_enc_1', 'd1_b_enc_1', 'd2_b_enc_1', 'd1_c_enc_1', 'd2_c_enc_1', 'x_enc_2', 'y_enc_2', 'z_enc_2', 'a_enc_2', 'b_enc_2', 'c_enc_2', 'd1_x_enc_2', 'd2_x_enc_2', 'd1_y_enc_2', 'd2_y_enc_2', 'd1_z_enc_2', 'd2_z_enc_2', 'd1_a_enc_2', 'd2_a_enc_2', 'd1_b_enc_2', 'd2_b_enc_2', 'd1_c_enc_2', 'd2_c_enc_2'] ['fx_1', 'fy_1', 'fz_1', 'fx_2', 'fy_2', 'fz_2']
# Column indices of each feature subset within the full derivative feature
# matrix, so subsets can be sliced out of X by position.
all_cols = df[features_nth].columns
feature_idx = [all_cols.get_loc(c) for c in features]          # 12 pose features
feature_idx_2nd = [all_cols.get_loc(c) for c in features_2nd]  # + vel/acc
feature_idx_nth = [all_cols.get_loc(c) for c in features_nth]  # all features
# sanity check: slicing X by index must reproduce the original columns
print(df[features][4000:4001].to_numpy())
print(scaler_x.inverse_transform(X_normed)[:, feature_idx][4000:4001, :])
[[ 7.01138030e+02 9.05163185e+01 -2.66579203e+01 -8.99984769e+01 -4.16576996e-04 2.06331677e-03 7.04664055e+02 8.46991898e+01 -2.02531592e+01 8.99942266e+01 -2.56791472e-04 -1.79998530e+02]] [[ 7.01138030e+02 9.05163185e+01 -2.66579203e+01 -8.99984769e+01 -4.16576996e-04 2.06331677e-03 7.04664055e+02 8.46991898e+01 -2.02531593e+01 8.99942266e+01 -2.56791472e-04 -1.79998530e+02]]
# select 12 input features: x,y,z,a,b,c for robots 1 and 2
X_train_12 = X_train[:,feature_idx]
X_val_12 = X_val[:,feature_idx]
X_test_12 = X_test[:,feature_idx]
# windowed (RNN) variants of the same subset
X_seq_train_12 = X_seq_train[:,:,feature_idx]
X_seq_val_12 = X_seq_val[:,:,feature_idx]
X_seq_test_12 = X_seq_test[:,:,feature_idx]
# select 36 input features: x,y,z,a,b,c + 1st and 2nd order derivatives for robots 1 and 2
X_train_36 = X_train[:,feature_idx_2nd]
X_val_36 = X_val[:,feature_idx_2nd]
X_test_36 = X_test[:,feature_idx_2nd]
X_seq_train_36 = X_seq_train[:,:,feature_idx_2nd]
X_seq_val_36 = X_seq_val[:,:, feature_idx_2nd]
X_seq_test_36 = X_seq_test[:,:,feature_idx_2nd]
# sanity check: expect (N, 12), (N, 20, 12), (N, 36), (N, 20, 36)
print(X_train_12.shape)
print(X_seq_train_12.shape)
print(X_train_36.shape)
print(X_seq_train_36.shape)
(34354, 12) (34341, 20, 12) (34354, 36) (34341, 20, 36)
Finally, we're getting to the exciting part of training some neural nets.
# reusable loss-curve plot, used after each training run below
def plot_loss(history, name, title=''):
    """Plot training vs. validation loss per epoch and save to output_dir/name."""
    fig = plt.figure(figsize=(16, 10))
    ax = fig.add_subplot(111)
    for key in ('loss', 'val_loss'):
        ax.plot(history.history[key], label=key)
    ax.set_title(title)
    ax.set_xlabel('Epoch')
    ax.set_ylabel('Error')
    ax.legend()
    ax.grid(True)
    plt.savefig(output_dir / name)
# test-set loss per model, collected for the evaluation section
test_results = {}
Let's start with a simple linear regression.
# 1-D baseline: linear regression of fx_1 on x_1 alone.
x1_train = X_train[:, 0].reshape(-1, 1)
fx1_train = Y_train[:, 0].reshape(-1, 1)
x1_val = X_val[:, 0].reshape(-1, 1)
fx1_val = Y_val[:, 0].reshape(-1, 1)
x1_test = X_test[:, 0].reshape(-1, 1)
fx1_test = Y_test[:, 0].reshape(-1, 1)

# NOTE(review): keras.experimental.LinearModel is deprecated in newer Keras
# releases; a Sequential([Dense(1)]) model would be the drop-in replacement.
linear_model_x1 = keras.experimental.LinearModel()
linear_model_x1.compile(optimizer='adam', loss='mean_squared_error')
history_linear_x1 = linear_model_x1.fit(x1_train, fx1_train,
                                        validation_data=(x1_val, fx1_val),
                                        batch_size=32,
                                        epochs=40)
# persist the training history for later comparison plots
with open(output_dir/'history_linear_x1.pickle', 'wb') as f:
    pickle.dump(history_linear_x1.history, f)
Epoch 1/40 1074/1074 [==============================] - 1s 713us/step - loss: 0.0848 - val_loss: 0.0393 Epoch 2/40 1074/1074 [==============================] - 1s 593us/step - loss: 0.0279 - val_loss: 0.0192 Epoch 3/40 1074/1074 [==============================] - 1s 588us/step - loss: 0.0166 - val_loss: 0.0152 Epoch 4/40 1074/1074 [==============================] - 1s 582us/step - loss: 0.0150 - val_loss: 0.0150 Epoch 5/40 1074/1074 [==============================] - 1s 595us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 6/40 1074/1074 [==============================] - 1s 586us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 7/40 1074/1074 [==============================] - 1s 578us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 8/40 1074/1074 [==============================] - 1s 600us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 9/40 1074/1074 [==============================] - 1s 577us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 10/40 1074/1074 [==============================] - 1s 595us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 11/40 1074/1074 [==============================] - 1s 600us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 12/40 1074/1074 [==============================] - 1s 603us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 13/40 1074/1074 [==============================] - 1s 608us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 14/40 1074/1074 [==============================] - 1s 621us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 15/40 1074/1074 [==============================] - 1s 625us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 16/40 1074/1074 [==============================] - 1s 590us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 17/40 1074/1074 [==============================] - 1s 602us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 18/40 1074/1074 [==============================] - 1s 623us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 19/40 1074/1074 [==============================] - 1s 645us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 20/40 
1074/1074 [==============================] - 1s 598us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 21/40 1074/1074 [==============================] - 1s 605us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 22/40 1074/1074 [==============================] - 1s 638us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 23/40 1074/1074 [==============================] - 1s 697us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 24/40 1074/1074 [==============================] - 1s 784us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 25/40 1074/1074 [==============================] - 1s 767us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 26/40 1074/1074 [==============================] - 1s 762us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 27/40 1074/1074 [==============================] - 1s 733us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 28/40 1074/1074 [==============================] - 1s 759us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 29/40 1074/1074 [==============================] - 1s 737us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 30/40 1074/1074 [==============================] - 1s 718us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 31/40 1074/1074 [==============================] - 1s 689us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 32/40 1074/1074 [==============================] - 1s 719us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 33/40 1074/1074 [==============================] - 1s 705us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 34/40 1074/1074 [==============================] - 1s 680us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 35/40 1074/1074 [==============================] - 1s 717us/step - loss: 0.0149 - val_loss: 0.0150 Epoch 36/40 1074/1074 [==============================] - 1s 711us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 37/40 1074/1074 [==============================] - 1s 680us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 38/40 1074/1074 [==============================] - 1s 726us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 39/40 
1074/1074 [==============================] - 1s 720us/step - loss: 0.0149 - val_loss: 0.0151 Epoch 40/40 1074/1074 [==============================] - 1s 729us/step - loss: 0.0149 - val_loss: 0.0151
plot_loss(history_linear_x1, 'loss_linear_x1.pdf', '1D linear model ($f_{x_1}$ vs. $x_1$)')
# record test-set loss for the model-comparison table in the evaluation section
test_results['linear_x1'] = linear_model_x1.evaluate(x1_test, fx1_test, verbose=0)

# predicted vs. true scatter, with the diagonal drawn as reference
fx1_test_pred = linear_model_x1.predict(x1_test)
fig = plt.figure(figsize=(16, 10))
ax = fig.add_subplot(111, aspect='equal')
ax.scatter(fx1_test, fx1_test_pred)
diag = [np.min(fx1_test_pred), np.max(fx1_test_pred)]
ax.plot(diag, diag, linestyle='dashed', label='Predictions', color='k')
ax.set_xlabel('True Values')
ax.set_ylabel('Predictions')
ax.set_title('1D linear model ($f_{x_1}$ vs. $x_1$)')
plt.savefig(output_dir/'pred_vs_true_linear_x1.pdf')
# test data and the fitted line in the original (x1, fx1) plane
fig = plt.figure(figsize=(16,10))
ax = fig.add_subplot(111, aspect='equal')
ax.scatter(x1_test, fx1_test, label='Data')
ax.plot(x1_test, fx1_test_pred, label='Predictions', color='k')
ax.set_xlabel('$x_{1}$')
ax.set_ylabel('$f_{x{_1}}$')
ax.set_title('1D linear model ($f_{x_1}$ vs. $x_1$)')
ax.legend()
plt.savefig(output_dir/'linear_fx1_vs_x1.pdf')
# Linear model on all 12 pose features, predicting all 6 force components.
# NOTE(review): keras.experimental.LinearModel is deprecated in newer Keras.
linear_model_12 = keras.experimental.LinearModel(units=6)
linear_model_12.compile(optimizer='adam', loss='mean_squared_error')
history_linear_12 = linear_model_12.fit(X_train_12, Y_train,
                                        validation_data=(X_val_12, Y_val),
                                        batch_size=32,
                                        epochs=200)
# persist the training history for later comparison plots
with open(output_dir/'history_linear_12.pickle', 'wb') as f:
    pickle.dump(history_linear_12.history, f)
Epoch 1/200 1074/1074 [==============================] - 1s 751us/step - loss: 0.0453 - val_loss: 0.0299 Epoch 2/200 1074/1074 [==============================] - 1s 710us/step - loss: 0.0295 - val_loss: 0.0294 Epoch 3/200 1074/1074 [==============================] - 1s 717us/step - loss: 0.0292 - val_loss: 0.0292 Epoch 4/200 1074/1074 [==============================] - 1s 719us/step - loss: 0.0290 - val_loss: 0.0289 Epoch 5/200 1074/1074 [==============================] - 1s 699us/step - loss: 0.0289 - val_loss: 0.0289 Epoch 6/200 1074/1074 [==============================] - 1s 718us/step - loss: 0.0288 - val_loss: 0.0288 Epoch 7/200 1074/1074 [==============================] - 1s 752us/step - loss: 0.0288 - val_loss: 0.0288 Epoch 8/200 1074/1074 [==============================] - 1s 695us/step - loss: 0.0288 - val_loss: 0.0290 Epoch 9/200 1074/1074 [==============================] - 1s 712us/step - loss: 0.0288 - val_loss: 0.0288 Epoch 10/200 1074/1074 [==============================] - 1s 710us/step - loss: 0.0287 - val_loss: 0.0287 Epoch 11/200 1074/1074 [==============================] - 1s 649us/step - loss: 0.0287 - val_loss: 0.0289 Epoch 12/200 1074/1074 [==============================] - 1s 676us/step - loss: 0.0287 - val_loss: 0.0287 Epoch 13/200 1074/1074 [==============================] - 1s 650us/step - loss: 0.0287 - val_loss: 0.0287 Epoch 14/200 1074/1074 [==============================] - 1s 633us/step - loss: 0.0287 - val_loss: 0.0287 Epoch 15/200 1074/1074 [==============================] - 1s 633us/step - loss: 0.0286 - val_loss: 0.0287 Epoch 16/200 1074/1074 [==============================] - 1s 626us/step - loss: 0.0286 - val_loss: 0.0286 Epoch 17/200 1074/1074 [==============================] - 1s 622us/step - loss: 0.0286 - val_loss: 0.0286 Epoch 18/200 1074/1074 [==============================] - 1s 637us/step - loss: 0.0286 - val_loss: 0.0286 Epoch 19/200 1074/1074 [==============================] - 1s 678us/step - loss: 0.0286 - val_loss: 
0.0287 Epoch 20/200 1074/1074 [==============================] - 1s 636us/step - loss: 0.0286 - val_loss: 0.0287 Epoch 21/200 1074/1074 [==============================] - 1s 622us/step - loss: 0.0286 - val_loss: 0.0286 Epoch 22/200 1074/1074 [==============================] - 1s 628us/step - loss: 0.0285 - val_loss: 0.0285 Epoch 23/200 1074/1074 [==============================] - 1s 629us/step - loss: 0.0285 - val_loss: 0.0285 Epoch 24/200 1074/1074 [==============================] - 1s 675us/step - loss: 0.0285 - val_loss: 0.0285 Epoch 25/200 1074/1074 [==============================] - 1s 651us/step - loss: 0.0285 - val_loss: 0.0286 Epoch 26/200 1074/1074 [==============================] - 1s 650us/step - loss: 0.0285 - val_loss: 0.0285 Epoch 27/200 1074/1074 [==============================] - 1s 770us/step - loss: 0.0285 - val_loss: 0.0286 Epoch 28/200 1074/1074 [==============================] - 1s 719us/step - loss: 0.0285 - val_loss: 0.0285 Epoch 29/200 1074/1074 [==============================] - 1s 723us/step - loss: 0.0285 - val_loss: 0.0286 Epoch 30/200 1074/1074 [==============================] - 1s 715us/step - loss: 0.0284 - val_loss: 0.0285 Epoch 31/200 1074/1074 [==============================] - 1s 735us/step - loss: 0.0284 - val_loss: 0.0284 Epoch 32/200 1074/1074 [==============================] - 1s 734us/step - loss: 0.0284 - val_loss: 0.0284 Epoch 33/200 1074/1074 [==============================] - 1s 633us/step - loss: 0.0284 - val_loss: 0.0285 Epoch 34/200 1074/1074 [==============================] - 1s 636us/step - loss: 0.0284 - val_loss: 0.0283 Epoch 35/200 1074/1074 [==============================] - 1s 622us/step - loss: 0.0284 - val_loss: 0.0284 Epoch 36/200 1074/1074 [==============================] - 1s 606us/step - loss: 0.0284 - val_loss: 0.0284 Epoch 37/200 1074/1074 [==============================] - 1s 602us/step - loss: 0.0284 - val_loss: 0.0284 Epoch 38/200 1074/1074 [==============================] - 1s 615us/step - loss: 
0.0283 - val_loss: 0.0284 Epoch 39/200 1074/1074 [==============================] - 1s 729us/step - loss: 0.0283 - val_loss: 0.0284 Epoch 40/200 1074/1074 [==============================] - 1s 799us/step - loss: 0.0283 - val_loss: 0.0283 Epoch 41/200 1074/1074 [==============================] - 1s 703us/step - loss: 0.0283 - val_loss: 0.0283 Epoch 42/200 1074/1074 [==============================] - 1s 705us/step - loss: 0.0283 - val_loss: 0.0284 Epoch 43/200 1074/1074 [==============================] - 1s 644us/step - loss: 0.0283 - val_loss: 0.0283 Epoch 44/200 1074/1074 [==============================] - 1s 695us/step - loss: 0.0283 - val_loss: 0.0283 Epoch 45/200 1074/1074 [==============================] - 1s 658us/step - loss: 0.0283 - val_loss: 0.0285 Epoch 46/200 1074/1074 [==============================] - 1s 617us/step - loss: 0.0283 - val_loss: 0.0283 Epoch 47/200 1074/1074 [==============================] - 1s 627us/step - loss: 0.0283 - val_loss: 0.0283 Epoch 48/200 1074/1074 [==============================] - 1s 619us/step - loss: 0.0283 - val_loss: 0.0282 Epoch 49/200 1074/1074 [==============================] - 1s 614us/step - loss: 0.0282 - val_loss: 0.0283 Epoch 50/200 1074/1074 [==============================] - 1s 603us/step - loss: 0.0282 - val_loss: 0.0282 Epoch 51/200 1074/1074 [==============================] - 1s 596us/step - loss: 0.0282 - val_loss: 0.0283 Epoch 52/200 1074/1074 [==============================] - 1s 605us/step - loss: 0.0282 - val_loss: 0.0282 Epoch 53/200 1074/1074 [==============================] - 1s 614us/step - loss: 0.0282 - val_loss: 0.0282 Epoch 54/200 1074/1074 [==============================] - 1s 603us/step - loss: 0.0282 - val_loss: 0.0283 Epoch 55/200 1074/1074 [==============================] - 1s 612us/step - loss: 0.0282 - val_loss: 0.0282 Epoch 56/200 1074/1074 [==============================] - 1s 604us/step - loss: 0.0282 - val_loss: 0.0283 Epoch 57/200 1074/1074 [==============================] - 1s 
642us/step - loss: 0.0281 - val_loss: 0.0282 Epoch 58/200 1074/1074 [==============================] - 1s 623us/step - loss: 0.0282 - val_loss: 0.0282 Epoch 59/200 1074/1074 [==============================] - 1s 600us/step - loss: 0.0281 - val_loss: 0.0281 Epoch 60/200 1074/1074 [==============================] - 1s 601us/step - loss: 0.0281 - val_loss: 0.0283 Epoch 61/200 1074/1074 [==============================] - 1s 602us/step - loss: 0.0281 - val_loss: 0.0283 Epoch 62/200 1074/1074 [==============================] - 1s 601us/step - loss: 0.0281 - val_loss: 0.0281 Epoch 63/200 1074/1074 [==============================] - 1s 606us/step - loss: 0.0281 - val_loss: 0.0281 Epoch 64/200 1074/1074 [==============================] - 1s 616us/step - loss: 0.0281 - val_loss: 0.0281 Epoch 65/200 1074/1074 [==============================] - 1s 605us/step - loss: 0.0281 - val_loss: 0.0281 Epoch 66/200 1074/1074 [==============================] - 1s 605us/step - loss: 0.0281 - val_loss: 0.0282 Epoch 67/200 1074/1074 [==============================] - 1s 610us/step - loss: 0.0281 - val_loss: 0.0282 Epoch 68/200 1074/1074 [==============================] - 1s 630us/step - loss: 0.0281 - val_loss: 0.0281 Epoch 69/200 1074/1074 [==============================] - 1s 621us/step - loss: 0.0281 - val_loss: 0.0281 Epoch 70/200 1074/1074 [==============================] - 1s 611us/step - loss: 0.0280 - val_loss: 0.0282 Epoch 71/200 1074/1074 [==============================] - 1s 606us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 72/200 1074/1074 [==============================] - 1s 604us/step - loss: 0.0280 - val_loss: 0.0281 Epoch 73/200 1074/1074 [==============================] - 1s 631us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 74/200 1074/1074 [==============================] - 1s 604us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 75/200 1074/1074 [==============================] - 1s 606us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 76/200 1074/1074 
[==============================] - 1s 628us/step - loss: 0.0280 - val_loss: 0.0281 Epoch 77/200 1074/1074 [==============================] - 1s 605us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 78/200 1074/1074 [==============================] - 1s 614us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 79/200 1074/1074 [==============================] - 1s 612us/step - loss: 0.0280 - val_loss: 0.0279 Epoch 80/200 1074/1074 [==============================] - 1s 611us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 81/200 1074/1074 [==============================] - 1s 593us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 82/200 1074/1074 [==============================] - 1s 606us/step - loss: 0.0280 - val_loss: 0.0280 Epoch 83/200 1074/1074 [==============================] - 1s 604us/step - loss: 0.0279 - val_loss: 0.0281 Epoch 84/200 1074/1074 [==============================] - 1s 600us/step - loss: 0.0279 - val_loss: 0.0279 Epoch 85/200 1074/1074 [==============================] - 1s 630us/step - loss: 0.0279 - val_loss: 0.0279 Epoch 86/200 1074/1074 [==============================] - 1s 603us/step - loss: 0.0279 - val_loss: 0.0279 Epoch 87/200 1074/1074 [==============================] - 1s 599us/step - loss: 0.0279 - val_loss: 0.0279 Epoch 88/200 1074/1074 [==============================] - 1s 599us/step - loss: 0.0279 - val_loss: 0.0279 Epoch 89/200 1074/1074 [==============================] - 1s 634us/step - loss: 0.0279 - val_loss: 0.0279 Epoch 90/200 1074/1074 [==============================] - 1s 600us/step - loss: 0.0279 - val_loss: 0.0279 Epoch 91/200 1074/1074 [==============================] - 1s 611us/step - loss: 0.0279 - val_loss: 0.0279 Epoch 92/200 1074/1074 [==============================] - 1s 619us/step - loss: 0.0279 - val_loss: 0.0280 Epoch 93/200 1074/1074 [==============================] - 1s 610us/step - loss: 0.0279 - val_loss: 0.0278 Epoch 94/200 1074/1074 [==============================] - 1s 640us/step - loss: 0.0279 - val_loss: 0.0278 Epoch 
95/200 1074/1074 [==============================] - 1s 601us/step - loss: 0.0279 - val_loss: 0.0278 Epoch 96/200 1074/1074 [==============================] - 1s 599us/step - loss: 0.0278 - val_loss: 0.0278 Epoch 97/200 1074/1074 [==============================] - 1s 599us/step - loss: 0.0278 - val_loss: 0.0279 Epoch 98/200 1074/1074 [==============================] - 1s 827us/step - loss: 0.0278 - val_loss: 0.0278 Epoch 99/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0278 - val_loss: 0.0279 Epoch 100/200 1074/1074 [==============================] - 1s 928us/step - loss: 0.0278 - val_loss: 0.0279 Epoch 101/200 1074/1074 [==============================] - 2s 2ms/step - loss: 0.0278 - val_loss: 0.0278 Epoch 102/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0278 - val_loss: 0.0280 Epoch 103/200 1074/1074 [==============================] - 2s 1ms/step - loss: 0.0278 - val_loss: 0.0279 Epoch 104/200 1074/1074 [==============================] - 1s 662us/step - loss: 0.0278 - val_loss: 0.0278 Epoch 105/200 1074/1074 [==============================] - 2s 1ms/step - loss: 0.0278 - val_loss: 0.0281 Epoch 106/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0278 - val_loss: 0.0277 Epoch 107/200 1074/1074 [==============================] - 2s 1ms/step - loss: 0.0278 - val_loss: 0.0277 Epoch 108/200 1074/1074 [==============================] - 2s 1ms/step - loss: 0.0277 - val_loss: 0.0277 Epoch 109/200 1074/1074 [==============================] - 2s 2ms/step - loss: 0.0277 - val_loss: 0.0280 Epoch 110/200 1074/1074 [==============================] - 2s 1ms/step - loss: 0.0277 - val_loss: 0.0277 Epoch 111/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0277 - val_loss: 0.0278 Epoch 112/200 1074/1074 [==============================] - 2s 1ms/step - loss: 0.0277 - val_loss: 0.0278 Epoch 113/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0277 - val_loss: 0.0278 
Epoch 114/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0277 - val_loss: 0.0278 Epoch 115/200 1074/1074 [==============================] - 2s 1ms/step - loss: 0.0277 - val_loss: 0.0277 Epoch 116/200 1074/1074 [==============================] - 2s 2ms/step - loss: 0.0277 - val_loss: 0.0277 Epoch 117/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0277 - val_loss: 0.0277 Epoch 118/200 1074/1074 [==============================] - 1s 944us/step - loss: 0.0277 - val_loss: 0.0276 Epoch 119/200 1074/1074 [==============================] - 1s 873us/step - loss: 0.0277 - val_loss: 0.0276 Epoch 120/200 1074/1074 [==============================] - 1s 899us/step - loss: 0.0277 - val_loss: 0.0277 Epoch 121/200 1074/1074 [==============================] - 1s 861us/step - loss: 0.0277 - val_loss: 0.0277 Epoch 122/200 1074/1074 [==============================] - 1s 940us/step - loss: 0.0276 - val_loss: 0.0276 Epoch 123/200 1074/1074 [==============================] - 1s 887us/step - loss: 0.0276 - val_loss: 0.0277 Epoch 124/200 1074/1074 [==============================] - 1s 859us/step - loss: 0.0276 - val_loss: 0.0278 Epoch 125/200 1074/1074 [==============================] - 1s 903us/step - loss: 0.0276 - val_loss: 0.0276 Epoch 126/200 1074/1074 [==============================] - 1s 901us/step - loss: 0.0276 - val_loss: 0.0277 Epoch 127/200 1074/1074 [==============================] - 1s 914us/step - loss: 0.0276 - val_loss: 0.0276 Epoch 128/200 1074/1074 [==============================] - 1s 857us/step - loss: 0.0276 - val_loss: 0.0277 Epoch 129/200 1074/1074 [==============================] - 1s 891us/step - loss: 0.0276 - val_loss: 0.0276 Epoch 130/200 1074/1074 [==============================] - 1s 850us/step - loss: 0.0276 - val_loss: 0.0276 Epoch 131/200 1074/1074 [==============================] - 1s 862us/step - loss: 0.0276 - val_loss: 0.0277 Epoch 132/200 1074/1074 [==============================] - 1s 896us/step - loss: 
0.0276 - val_loss: 0.0276 Epoch 133/200 1074/1074 [==============================] - 1s 862us/step - loss: 0.0276 - val_loss: 0.0277 Epoch 134/200 1074/1074 [==============================] - 1s 857us/step - loss: 0.0275 - val_loss: 0.0276 Epoch 135/200 1074/1074 [==============================] - 1s 898us/step - loss: 0.0275 - val_loss: 0.0275 Epoch 136/200 1074/1074 [==============================] - 1s 847us/step - loss: 0.0275 - val_loss: 0.0280 Epoch 137/200 1074/1074 [==============================] - 1s 908us/step - loss: 0.0275 - val_loss: 0.0275 Epoch 138/200 1074/1074 [==============================] - 1s 960us/step - loss: 0.0275 - val_loss: 0.0276 Epoch 139/200 1074/1074 [==============================] - 1s 867us/step - loss: 0.0275 - val_loss: 0.0276 Epoch 140/200 1074/1074 [==============================] - 1s 908us/step - loss: 0.0275 - val_loss: 0.0276 Epoch 141/200 1074/1074 [==============================] - 1s 858us/step - loss: 0.0275 - val_loss: 0.0275 Epoch 142/200 1074/1074 [==============================] - 1s 910us/step - loss: 0.0275 - val_loss: 0.0275 Epoch 143/200 1074/1074 [==============================] - 1s 858us/step - loss: 0.0275 - val_loss: 0.0275 Epoch 144/200 1074/1074 [==============================] - 1s 895us/step - loss: 0.0275 - val_loss: 0.0275 Epoch 145/200 1074/1074 [==============================] - 1s 856us/step - loss: 0.0275 - val_loss: 0.0277 Epoch 146/200 1074/1074 [==============================] - 1s 881us/step - loss: 0.0275 - val_loss: 0.0275 Epoch 147/200 1074/1074 [==============================] - 1s 860us/step - loss: 0.0275 - val_loss: 0.0279 Epoch 148/200 1074/1074 [==============================] - 1s 995us/step - loss: 0.0274 - val_loss: 0.0274 Epoch 149/200 1074/1074 [==============================] - 1s 926us/step - loss: 0.0274 - val_loss: 0.0274 Epoch 150/200 1074/1074 [==============================] - 1s 843us/step - loss: 0.0274 - val_loss: 0.0274 Epoch 151/200 1074/1074 
[==============================] - 1s 894us/step - loss: 0.0274 - val_loss: 0.0274 Epoch 152/200 1074/1074 [==============================] - 1s 861us/step - loss: 0.0274 - val_loss: 0.0276 Epoch 153/200 1074/1074 [==============================] - 1s 898us/step - loss: 0.0274 - val_loss: 0.0274 Epoch 154/200 1074/1074 [==============================] - 1s 871us/step - loss: 0.0274 - val_loss: 0.0275 Epoch 155/200 1074/1074 [==============================] - 1s 861us/step - loss: 0.0274 - val_loss: 0.0274 Epoch 156/200 1074/1074 [==============================] - 1s 851us/step - loss: 0.0274 - val_loss: 0.0274 Epoch 157/200 1074/1074 [==============================] - 1s 959us/step - loss: 0.0274 - val_loss: 0.0274 Epoch 158/200 1074/1074 [==============================] - 1s 902us/step - loss: 0.0274 - val_loss: 0.0275 Epoch 159/200 1074/1074 [==============================] - 1s 936us/step - loss: 0.0274 - val_loss: 0.0273 Epoch 160/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0274 - val_loss: 0.0276 Epoch 161/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0274 - val_loss: 0.0274 Epoch 162/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0274 - val_loss: 0.0274 Epoch 163/200 1074/1074 [==============================] - 1s 983us/step - loss: 0.0273 - val_loss: 0.0274 Epoch 164/200 1074/1074 [==============================] - 1s 960us/step - loss: 0.0274 - val_loss: 0.0275 Epoch 165/200 1074/1074 [==============================] - 1s 914us/step - loss: 0.0273 - val_loss: 0.0274 Epoch 166/200 1074/1074 [==============================] - 1s 953us/step - loss: 0.0273 - val_loss: 0.0274 Epoch 167/200 1074/1074 [==============================] - 1s 996us/step - loss: 0.0273 - val_loss: 0.0274 Epoch 168/200 1074/1074 [==============================] - 1s 943us/step - loss: 0.0273 - val_loss: 0.0274 Epoch 169/200 1074/1074 [==============================] - 1s 928us/step - loss: 0.0273 - val_loss: 
0.0273 Epoch 170/200 1074/1074 [==============================] - 1s 961us/step - loss: 0.0273 - val_loss: 0.0274 Epoch 171/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0273 - val_loss: 0.0274 Epoch 172/200 1074/1074 [==============================] - 1s 961us/step - loss: 0.0273 - val_loss: 0.0273 Epoch 173/200 1074/1074 [==============================] - 1s 912us/step - loss: 0.0273 - val_loss: 0.0273 Epoch 174/200 1074/1074 [==============================] - 1s 862us/step - loss: 0.0273 - val_loss: 0.0272 Epoch 175/200 1074/1074 [==============================] - 1s 976us/step - loss: 0.0273 - val_loss: 0.0274 Epoch 176/200 1074/1074 [==============================] - 1s 892us/step - loss: 0.0272 - val_loss: 0.0272 Epoch 177/200 1074/1074 [==============================] - 1s 906us/step - loss: 0.0272 - val_loss: 0.0274 Epoch 178/200 1074/1074 [==============================] - 1s 910us/step - loss: 0.0272 - val_loss: 0.0272 Epoch 179/200 1074/1074 [==============================] - 1s 939us/step - loss: 0.0272 - val_loss: 0.0273 Epoch 180/200 1074/1074 [==============================] - 1s 1ms/step - loss: 0.0272 - val_loss: 0.0274 Epoch 181/200 1074/1074 [==============================] - 1s 870us/step - loss: 0.0272 - val_loss: 0.0272 Epoch 182/200 1074/1074 [==============================] - 1s 864us/step - loss: 0.0272 - val_loss: 0.0275 Epoch 183/200 1074/1074 [==============================] - 1s 908us/step - loss: 0.0272 - val_loss: 0.0272 Epoch 184/200 1074/1074 [==============================] - 1s 869us/step - loss: 0.0272 - val_loss: 0.0273 Epoch 185/200 1074/1074 [==============================] - 1s 901us/step - loss: 0.0272 - val_loss: 0.0273 Epoch 186/200 1074/1074 [==============================] - 1s 860us/step - loss: 0.0272 - val_loss: 0.0273 Epoch 187/200 1074/1074 [==============================] - 1s 900us/step - loss: 0.0272 - val_loss: 0.0273 Epoch 188/200 1074/1074 [==============================] - 1s 
856us/step - loss: 0.0272 - val_loss: 0.0271 Epoch 189/200 1074/1074 [==============================] - 1s 904us/step - loss: 0.0272 - val_loss: 0.0272 Epoch 190/200 1074/1074 [==============================] - 1s 867us/step - loss: 0.0272 - val_loss: 0.0272 Epoch 191/200 1074/1074 [==============================] - 1s 974us/step - loss: 0.0271 - val_loss: 0.0272 Epoch 192/200 1074/1074 [==============================] - 1s 896us/step - loss: 0.0271 - val_loss: 0.0272 Epoch 193/200 1074/1074 [==============================] - 1s 915us/step - loss: 0.0271 - val_loss: 0.0272 Epoch 194/200 1074/1074 [==============================] - 1s 940us/step - loss: 0.0271 - val_loss: 0.0272 Epoch 195/200 1074/1074 [==============================] - 1s 860us/step - loss: 0.0271 - val_loss: 0.0271 Epoch 196/200 1074/1074 [==============================] - 1s 962us/step - loss: 0.0271 - val_loss: 0.0271 Epoch 197/200 1074/1074 [==============================] - 1s 877us/step - loss: 0.0271 - val_loss: 0.0271 Epoch 198/200 1074/1074 [==============================] - 1s 869us/step - loss: 0.0271 - val_loss: 0.0271 Epoch 199/200 1074/1074 [==============================] - 1s 909us/step - loss: 0.0271 - val_loss: 0.0271 Epoch 200/200 1074/1074 [==============================] - 1s 873us/step - loss: 0.0271 - val_loss: 0.0271
# Plot the linear model's training/validation loss curves and save to PDF.
plot_loss(history_linear_12, 'loss_linear_12.pdf', 'Full linear model (predict 6 forces from 12 input features)')
# save model loss on test set for evaluation section below
# NOTE(review): evaluate() presumably returns the scalar test-set loss here
# (a single compiled loss, no extra metrics) -- verify against the linear
# model's compile() call, which is outside this view.
test_results['linear_12'] = linear_model_12.evaluate(X_test_12, Y_test, verbose=0)
def plot_pred_vs_true(pred, true, name, titles=None):
    """Scatter-plot predicted vs. true values, one subplot per output.

    Draws up to a 2x3 grid of scatter plots (one per output column) with a
    dashed y = x reference line, and saves the figure to
    ``output_dir / 'pred_vs_true_<name>.pdf'``.

    Parameters
    ----------
    pred : array-like, shape (n_samples, n_outputs)
        Model predictions.
    true : array-like, shape (n_samples, n_outputs)
        Ground-truth targets, same shape as ``pred``.
    name : str
        Suffix used in the saved file name.
    titles : list of str, optional
        Per-subplot titles; ignored when None or shorter than n_outputs.
    """
    fig = plt.figure(figsize=(30, 16))
    n_outputs = len(pred.T)
    # Reference-line endpoints span the combined range of predictions AND
    # true values (the original used only pred's range, which could leave
    # part of the scatter beyond the y = x line).
    lo = min(np.min(pred), np.min(true))
    hi = max(np.max(pred), np.max(true))
    for i in range(n_outputs):
        ax = fig.add_subplot(2, 3, i + 1)
        ax.scatter(true.T[i], pred.T[i])
        ax.plot([lo, hi], [lo, hi], linestyle='dashed', linewidth=2, color='k')
        ax.set_xlabel('True Values')
        ax.set_ylabel('Predictions')
        # BUG FIX: the original called len(titles) unconditionally, which
        # raised TypeError when titles was left at its default None.
        if titles is not None and len(titles) >= n_outputs:
            ax.set_title(titles[i])
    plt.savefig(output_dir/'pred_vs_true_{}.pdf'.format(name))
# Generate test-set predictions with the trained linear model and plot
# predicted vs. true values for each of the 6 force components
# (x/y/z forces for sensors 1 and 2).
Y_test_pred_linear_12 = linear_model_12.predict(X_test_12)
plot_pred_vs_true(Y_test_pred_linear_12, Y_test, 'pred_vs_true_linear_12', titles = ['$f_{x_1}$','$f_{y_1}$','$f_{z_1}$','$f_{x_2}$','$f_{y_2}$','$f_{z_2}$'])
I experimented by varying the number of layers, the dropout rate, the learning rate, and the batch size — the model below achieves pretty good performance.
# The Sequential model will do just fine here
# (the functional API is more flexible, though)
def setup_dnn_model(n_outputs, n_hidden=5, units=100, dropout_rate=0.05,
                    learning_rate=1e-3, decay=5e-6):
    """Build and compile a fully-connected regression network.

    The default arguments reproduce the original hard-coded architecture
    exactly: 5 hidden Dense(100, relu) layers with Dropout(0.05) between
    them (none after the last hidden layer), a linear output layer, MSE
    loss, and Adam(learning_rate=1e-3, decay=5e-6).

    Parameters
    ----------
    n_outputs : int
        Number of regression targets (output-layer units).
    n_hidden : int, optional
        Number of hidden Dense layers.
    units : int, optional
        Width of each hidden layer.
    dropout_rate : float, optional
        Dropout rate applied between hidden layers.
    learning_rate, decay : float, optional
        Adam optimizer hyperparameters.

    Returns
    -------
    keras.Sequential
        A compiled, untrained model.
    """
    model_layers = []
    for i in range(n_hidden):
        model_layers.append(layers.Dense(units, activation='relu'))
        # No dropout after the final hidden layer -- matches the tuned
        # original stack, which ended Dense -> Dense(n_outputs).
        if i < n_hidden - 1:
            model_layers.append(layers.Dropout(dropout_rate))
    model_layers.append(layers.Dense(n_outputs))
    model = keras.Sequential(model_layers)
    model.compile(loss='mean_squared_error',
                  optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate,
                                                     decay=decay))
    return model
# dnn config
# Tag encodes the architecture: 5 hidden layers x 100 units, dropout 0.05.
dnn_tag = "dnn_100x5_05dropout"
dnn_epochs = 500
dnn_batch_size = 32
# One output unit per target column (last axis of Y_train).
dnn_model_12 = setup_dnn_model(Y_train.shape[-1])
# "_12features" distinguishes this run from reduced-feature variants.
dnn_model_12_tag = "{}_12features".format(dnn_tag)
%%time
# Early stopping is defined but intentionally disabled below (see the
# commented-out callbacks line), so training runs the full 500 epochs.
early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=20)
# NOTE(review): without save_best_only=True this checkpoint overwrites the
# same file every epoch, so it holds the *latest* weights, not the best
# val_loss weights -- confirm that is the intent.
save_every_epoch = tf.keras.callbacks.ModelCheckpoint(output_dir/'dnn_12_tmp.h5', monitor='val_loss', save_freq='epoch')
# Train the DNN on the 12-feature inputs, validating each epoch.
history_dnn_12 = dnn_model_12.fit(
X_train_12, Y_train,
validation_data=(X_val_12, Y_val),
batch_size = dnn_batch_size,
epochs=dnn_epochs,
callbacks=[save_every_epoch]
#callbacks=[early_stop, save_every_epoch]
#verbose=0,
)
dnn_model_12.summary()
# Persist the per-epoch loss history so the curves can be re-plotted
# later without retraining.
with open(output_dir/'history_dnn_12.pickle', 'wb') as f:
pickle.dump(history_dnn_12.history, f)
Epoch 1/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0207 - val_loss: 0.0077
Epoch 2/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0072 - val_loss: 0.0041
Epoch 3/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0051 - val_loss: 0.0037
Epoch 4/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0043 - val_loss: 0.0031
Epoch 5/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0038 - val_loss: 0.0027
Epoch 6/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0035 - val_loss: 0.0026
Epoch 7/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0033 - val_loss: 0.0033
Epoch 8/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0032 - val_loss: 0.0024
Epoch 9/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0031 - val_loss: 0.0026
Epoch 10/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0029 - val_loss: 0.0022
Epoch 11/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0028 - val_loss: 0.0027
Epoch 12/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0028 - val_loss: 0.0020
Epoch 13/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0026 - val_loss: 0.0018
Epoch 14/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0027 - val_loss: 0.0018
Epoch 15/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0026 - val_loss: 0.0020
Epoch 16/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0026 - val_loss: 0.0020
Epoch 17/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0025 - val_loss: 0.0017
Epoch 18/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0024 - val_loss: 0.0018
Epoch 19/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0024 - val_loss: 0.0018
Epoch 20/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0024 - val_loss: 0.0018
Epoch 21/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0023 - val_loss: 0.0017
Epoch 22/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0024 - val_loss: 0.0016
Epoch 23/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0023 - val_loss: 0.0017
Epoch 24/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0023 - val_loss: 0.0016
Epoch 25/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0015
Epoch 26/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0016
Epoch 27/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0023 - val_loss: 0.0015
Epoch 28/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0021 - val_loss: 0.0016
Epoch 29/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0018
Epoch 30/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0027
Epoch 31/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0019
Epoch 32/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0015
Epoch 33/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0016
Epoch 34/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0021 - val_loss: 0.0016
Epoch 35/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0015
Epoch 36/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0021 - val_loss: 0.0022
Epoch 37/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0015
Epoch 38/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0021 - val_loss: 0.0017
Epoch 39/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0020 - val_loss: 0.0015
Epoch 40/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0019
Epoch 41/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0014
Epoch 42/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0014
Epoch 43/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0016
Epoch 44/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0018
Epoch 45/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0016
Epoch 46/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0014
Epoch 47/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0016
Epoch 48/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0016
Epoch 49/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0013
Epoch 50/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0017
Epoch 51/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0014
Epoch 52/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0019
Epoch 53/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0018
Epoch 54/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0014
Epoch 55/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0013
Epoch 56/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0015
Epoch 57/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0013
Epoch 58/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0016
Epoch 59/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0018 - val_loss: 0.0014
Epoch 60/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0015
Epoch 61/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0018
Epoch 62/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0016
Epoch 63/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 64/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0017 - val_loss: 0.0015
Epoch 65/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 66/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 67/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0012
Epoch 68/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 69/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 70/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 71/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0012
Epoch 72/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0012
Epoch 73/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0012
Epoch 74/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0016 - val_loss: 0.0015
Epoch 75/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 76/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0014
Epoch 77/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0012
Epoch 78/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 79/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 80/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0012
Epoch 81/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 82/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 83/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 84/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 85/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 86/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 87/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 88/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 89/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0015
Epoch 90/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 91/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0013
Epoch 92/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 93/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 94/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 95/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 96/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 97/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 98/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 99/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 100/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 101/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0014
Epoch 102/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 103/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 104/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 105/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0013
Epoch 106/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 107/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 108/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0014
Epoch 109/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 110/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 111/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 112/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 113/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 114/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 115/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0014
Epoch 116/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0011
Epoch 117/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 118/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0017
Epoch 119/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0014
Epoch 120/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 121/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 122/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0013
Epoch 123/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 124/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0016
Epoch 125/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 126/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0013
Epoch 127/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0015
Epoch 128/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 129/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 130/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 131/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 132/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0013
Epoch 133/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0014
Epoch 134/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 135/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 136/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 137/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 138/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0013
Epoch 139/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 140/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 141/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 142/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 143/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 144/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0013
Epoch 145/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0014
Epoch 146/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0013
Epoch 147/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0013
Epoch 148/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 149/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0014
Epoch 150/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0015
Epoch 151/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 152/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 153/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 154/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 155/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 156/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 157/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 158/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 159/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0016
Epoch 160/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 161/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 162/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 163/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 164/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 165/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 166/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 167/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 168/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 169/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 170/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 171/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 172/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 173/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0013
Epoch 174/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 175/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 176/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0012
Epoch 177/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 178/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 179/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 180/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 181/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 182/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 183/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 184/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 185/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 186/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 187/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 188/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 189/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 190/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 191/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 192/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 193/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 194/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 195/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 196/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 197/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 198/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 199/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 200/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 201/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 202/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 203/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 204/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 205/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 206/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 207/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 208/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 209/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 210/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 211/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 212/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 213/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 214/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 215/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 216/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 217/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 218/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 219/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 220/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 221/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 222/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 223/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 224/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 225/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 226/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 227/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0012
Epoch 228/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 229/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 230/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 231/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 232/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 233/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 234/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 235/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 236/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 237/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 238/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 239/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 240/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0012 - val_loss: 9.9608e-04
Epoch 241/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 242/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 243/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 244/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 245/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 246/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 247/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 248/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 249/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 250/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 251/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 252/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 253/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 254/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 255/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 256/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 257/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 258/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 259/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 260/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 261/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 262/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.8749e-04
Epoch 263/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 264/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 265/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 266/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 267/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 268/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 269/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 270/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 271/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 272/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 273/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0015
Epoch 274/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0013
Epoch 275/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 276/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.9080e-04
Epoch 277/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 278/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 279/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 280/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 281/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 282/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 283/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 284/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 285/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 286/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 287/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 288/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 289/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 290/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 291/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 292/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 293/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 294/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 295/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 296/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 297/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.9977e-04
Epoch 298/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 299/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 300/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 301/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 302/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 303/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.9612e-04
Epoch 304/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 305/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 306/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 307/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.8888e-04
Epoch 308/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 309/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 310/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 311/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 312/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 313/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 314/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 315/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 316/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 317/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 318/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 319/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 320/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 321/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.8763e-04
Epoch 322/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0013
Epoch 323/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 324/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 325/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 326/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 327/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 328/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 329/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 330/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 331/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0013
Epoch 332/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 333/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 334/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 335/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 336/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 337/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 338/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 339/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.7093e-04
Epoch 340/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 341/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 342/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 343/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0013
Epoch 344/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0013
Epoch 345/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 346/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 347/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.8835e-04
Epoch 348/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.9773e-04
Epoch 349/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 350/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 351/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 352/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 353/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0013
Epoch 354/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 355/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 356/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.9710e-04
Epoch 357/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.8096e-04
Epoch 358/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0013
Epoch 359/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 360/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 361/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 362/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0015
Epoch 363/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 364/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 365/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 366/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 367/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 368/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.6955e-04
Epoch 369/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 370/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 371/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 372/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 373/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 374/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 375/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 376/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.9051e-04
Epoch 377/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 378/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 379/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 380/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.9361e-04
Epoch 381/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 382/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 383/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 384/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 385/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.9886e-04
Epoch 386/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 387/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 388/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 389/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 390/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 391/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 392/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 393/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 394/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.9388e-04
Epoch 395/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 396/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 397/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 398/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.8968e-04
Epoch 399/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 400/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 401/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 402/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.5940e-04
Epoch 403/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 404/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 405/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 406/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 407/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 408/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 409/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 410/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 411/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 412/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 413/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 414/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 415/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 416/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 417/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 418/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 419/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.9743e-04
Epoch 420/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 421/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 422/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 423/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 424/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 425/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.8048e-04
Epoch 426/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 427/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.8656e-04
Epoch 428/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 429/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 430/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 431/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 432/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 433/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 434/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 435/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 436/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 437/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 438/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.8094e-04
Epoch 439/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 440/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 441/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 442/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 443/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 444/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 445/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 446/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 447/500
1074/1074 [==============================] - ETA: 0s - loss: 0.001 - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 448/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 449/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 450/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 451/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 452/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 453/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 454/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 455/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 456/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 457/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 458/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 459/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 460/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 461/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 462/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 463/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 464/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 465/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 466/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 467/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 468/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 469/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 470/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0012
Epoch 471/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 472/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 473/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 474/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 475/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 476/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 477/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 478/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 479/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 480/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 481/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 482/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 483/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 484/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 485/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 486/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 487/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 488/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 489/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 490/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 9.6944e-04
Epoch 491/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 492/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 493/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 494/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.9075e-04
Epoch 495/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 496/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 497/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 498/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 499/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 500/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense (Dense) (None, 100) 1300
dropout (Dropout) (None, 100) 0
dense_1 (Dense) (None, 100) 10100
dropout_1 (Dropout) (None, 100) 0
dense_2 (Dense) (None, 100) 10100
dropout_2 (Dropout) (None, 100) 0
dense_3 (Dense) (None, 100) 10100
dropout_3 (Dropout) (None, 100) 0
dense_4 (Dense) (None, 100) 10100
dense_5 (Dense) (None, 6) 606
=================================================================
Total params: 42,306
Trainable params: 42,306
Non-trainable params: 0
_________________________________________________________________
CPU times: user 30min 3s, sys: 13min 23s, total: 43min 26s
Wall time: 17min 9s
# Persist the trained 12-feature DNN, tagged with the run timestamp.
dnn_model_12.save(output_dir / f"{dnn_model_12_tag}_{timestamp}.h5")
plot_loss(history_dnn_12, f"loss_{dnn_model_12_tag}.pdf", 'DNN model (predict 6 forces from 12 input features)')
# Record the test-set loss so all models can be compared in the evaluation section below.
test_results['dnn_12'] = dnn_model_12.evaluate(X_test_12, Y_test, verbose=0)
Y_test_pred_dnn_12 = dnn_model_12.predict(X_test_12)
# Scatter predicted vs. true values for each of the six force components.
force_labels = ['$f_{x_1}$', '$f_{y_1}$', '$f_{z_1}$', '$f_{x_2}$', '$f_{y_2}$', '$f_{z_2}$']
plot_pred_vs_true(Y_test_pred_dnn_12, Y_test, f"pred_vs_true_{dnn_model_12_tag}", titles=force_labels)
Up to 2nd-order derivatives of the 12 input features (36 input features in total)
# Fresh model instance for the 36-feature input set (output width taken from Y_train).
dnn_model_36 = setup_dnn_model(Y_train.shape[-1])
dnn_model_36_tag = f"{dnn_tag}_36features"
%%time
early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=20)
save_every_epoch = tf.keras.callbacks.ModelCheckpoint(output_dir/'dnn_36_tmp.h5', monitor='val_loss', save_freq='epoch')
history_dnn_36 = dnn_model_36.fit(
X_train_36, Y_train,
validation_data=(X_val_36, Y_val),
batch_size = dnn_batch_size,
epochs=dnn_epochs,
callbacks=[save_every_epoch]
#callbacks=[early_stop, save_every_epoch]
#verbose=0,
)
dnn_model_36.summary()
with open(output_dir/'history_dnn_36.pickle', 'wb') as f:
pickle.dump(history_dnn_36.history, f)
Epoch 1/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0201 - val_loss: 0.0089
Epoch 2/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0083 - val_loss: 0.0055
Epoch 3/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0059 - val_loss: 0.0036
Epoch 4/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0045 - val_loss: 0.0028
Epoch 5/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0037 - val_loss: 0.0035
Epoch 6/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0035 - val_loss: 0.0022
Epoch 7/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0030 - val_loss: 0.0019
Epoch 8/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0032 - val_loss: 0.0018
Epoch 9/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0027 - val_loss: 0.0023
Epoch 10/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0028 - val_loss: 0.0020
Epoch 11/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0025 - val_loss: 0.0017
Epoch 12/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0025 - val_loss: 0.0015
Epoch 13/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0024 - val_loss: 0.0014
Epoch 14/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0023 - val_loss: 0.0014
Epoch 15/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0023 - val_loss: 0.0013
Epoch 16/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0021 - val_loss: 0.0023
Epoch 17/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0021 - val_loss: 0.0019
Epoch 18/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0022 - val_loss: 0.0014
Epoch 19/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0011
Epoch 20/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0021 - val_loss: 0.0017
Epoch 21/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0019 - val_loss: 0.0012
Epoch 22/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0014
Epoch 23/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0011
Epoch 24/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 25/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0012
Epoch 26/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0014
Epoch 27/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 28/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0016
Epoch 29/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 30/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 9.9531e-04
Epoch 31/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 9.2567e-04
Epoch 32/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 33/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0010
Epoch 34/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 35/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 36/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0018
Epoch 37/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 9.1125e-04
Epoch 38/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 9.8218e-04
Epoch 39/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 8.2299e-04
Epoch 40/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 9.9542e-04
Epoch 41/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 42/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0010
Epoch 43/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 44/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0010
Epoch 45/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 9.8542e-04
Epoch 46/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 9.5411e-04
Epoch 47/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.6341e-04
Epoch 48/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 8.4916e-04
Epoch 49/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 50/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 8.8436e-04
Epoch 51/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 52/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0015
Epoch 53/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0013
Epoch 54/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.6940e-04
Epoch 55/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.4494e-04
Epoch 56/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 57/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 8.1040e-04
Epoch 58/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 7.7084e-04
Epoch 59/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.8114e-04
Epoch 60/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.4028e-04
Epoch 61/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 62/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 8.1444e-04
Epoch 63/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 8.7118e-04
Epoch 64/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0014
Epoch 65/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.3312e-04
Epoch 66/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.1271e-04
Epoch 67/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.5469e-04
Epoch 68/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.8840e-04
Epoch 69/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.9081e-04
Epoch 70/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 7.8795e-04
Epoch 71/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.0119e-04
Epoch 72/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.0589e-04
Epoch 73/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 7.3963e-04
Epoch 74/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 75/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.2789e-04
Epoch 76/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 77/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.3022e-04
Epoch 78/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.3634e-04
Epoch 79/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.5879e-04
Epoch 80/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.5660e-04
Epoch 81/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.3050e-04
Epoch 82/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 8.4838e-04
Epoch 83/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.5775e-04
Epoch 84/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0015
Epoch 85/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 7.0057e-04
Epoch 86/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.5246e-04
Epoch 87/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.8603e-04 - val_loss: 8.0050e-04
Epoch 88/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.9164e-04
Epoch 89/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 8.3073e-04
Epoch 90/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0012
Epoch 91/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 7.5717e-04
Epoch 92/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.5463e-04
Epoch 93/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.8064e-04 - val_loss: 8.3318e-04
Epoch 94/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 95/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 8.5051e-04
Epoch 96/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.2623e-04
Epoch 97/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 98/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7836e-04 - val_loss: 7.9044e-04
Epoch 99/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.8323e-04 - val_loss: 7.1761e-04
Epoch 100/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.1429e-04
Epoch 101/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.9822e-04 - val_loss: 7.8550e-04
Epoch 102/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.2230e-04
Epoch 103/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.5667e-04 - val_loss: 7.2687e-04
Epoch 104/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.8325e-04 - val_loss: 0.0012
Epoch 105/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.6965e-04
Epoch 106/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.9149e-04
Epoch 107/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.3283e-04
Epoch 108/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 9.4613e-04
Epoch 109/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.3916e-04 - val_loss: 6.8566e-04
Epoch 110/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0012
Epoch 111/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 8.1296e-04
Epoch 112/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0070e-04 - val_loss: 8.2056e-04
Epoch 113/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7280e-04 - val_loss: 6.8727e-04
Epoch 114/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 115/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.6027e-04 - val_loss: 7.7016e-04
Epoch 116/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1366e-04 - val_loss: 6.8129e-04
Epoch 117/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.0899e-04
Epoch 118/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.4355e-04
Epoch 119/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2514e-04 - val_loss: 6.8508e-04
Epoch 120/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1027e-04 - val_loss: 7.0112e-04
Epoch 121/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1129e-04 - val_loss: 6.3309e-04
Epoch 122/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2651e-04 - val_loss: 8.9029e-04
Epoch 123/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.3880e-04 - val_loss: 6.6461e-04
Epoch 124/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.6815e-04 - val_loss: 7.5587e-04
Epoch 125/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2830e-04 - val_loss: 7.9682e-04
Epoch 126/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9823e-04 - val_loss: 8.6703e-04
Epoch 127/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.3032e-04 - val_loss: 7.1242e-04
Epoch 128/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1548e-04 - val_loss: 7.1334e-04
Epoch 129/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0034e-04 - val_loss: 7.4397e-04
Epoch 130/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4710e-04 - val_loss: 8.1068e-04
Epoch 131/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0933e-04 - val_loss: 8.1706e-04
Epoch 132/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1670e-04 - val_loss: 8.7928e-04
Epoch 133/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9209e-04 - val_loss: 6.4726e-04
Epoch 134/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8012e-04 - val_loss: 6.7990e-04
Epoch 135/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8024e-04 - val_loss: 6.6417e-04
Epoch 136/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.3763e-04 - val_loss: 7.5523e-04
Epoch 137/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6363e-04 - val_loss: 7.0456e-04
Epoch 138/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4098e-04 - val_loss: 6.8696e-04
Epoch 139/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1430e-04 - val_loss: 6.9542e-04
Epoch 140/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6404e-04 - val_loss: 6.5520e-04
Epoch 141/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6074e-04 - val_loss: 6.8265e-04
Epoch 142/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6402e-04 - val_loss: 7.8647e-04
Epoch 143/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6373e-04 - val_loss: 6.5939e-04
Epoch 144/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8908e-04 - val_loss: 6.7252e-04
Epoch 145/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5652e-04 - val_loss: 7.8410e-04
Epoch 146/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0101e-04 - val_loss: 5.4289e-04
Epoch 147/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3129e-04 - val_loss: 7.0886e-04
Epoch 148/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0236e-04 - val_loss: 6.8652e-04
Epoch 149/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2627e-04 - val_loss: 5.9777e-04
Epoch 150/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3372e-04 - val_loss: 6.9704e-04
Epoch 151/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6686e-04 - val_loss: 9.6236e-04
Epoch 152/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8280e-04 - val_loss: 7.1356e-04
Epoch 153/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2080e-04 - val_loss: 6.0945e-04
Epoch 154/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.7992e-04 - val_loss: 6.1058e-04
Epoch 155/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0692e-04 - val_loss: 5.6215e-04
Epoch 156/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9094e-04 - val_loss: 6.8528e-04
Epoch 157/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8202e-04 - val_loss: 6.5776e-04
Epoch 158/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5070e-04 - val_loss: 7.0865e-04
Epoch 159/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3891e-04 - val_loss: 6.3057e-04
Epoch 160/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0087e-04 - val_loss: 6.8777e-04
Epoch 161/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4973e-04 - val_loss: 6.4667e-04
Epoch 162/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2815e-04 - val_loss: 6.5162e-04
Epoch 163/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3370e-04 - val_loss: 6.7285e-04
Epoch 164/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8197e-04 - val_loss: 0.0011
Epoch 165/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1938e-04 - val_loss: 7.6903e-04
Epoch 166/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4306e-04 - val_loss: 7.0762e-04
Epoch 167/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0129e-04 - val_loss: 6.4855e-04
Epoch 168/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9215e-04 - val_loss: 5.8036e-04
Epoch 169/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2586e-04 - val_loss: 7.4988e-04
Epoch 170/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0861e-04 - val_loss: 8.4907e-04
Epoch 171/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1511e-04 - val_loss: 6.5337e-04
Epoch 172/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1764e-04 - val_loss: 6.5360e-04
Epoch 173/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1828e-04 - val_loss: 6.4048e-04
Epoch 174/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9555e-04 - val_loss: 6.1459e-04
Epoch 175/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4524e-04 - val_loss: 5.9518e-04
Epoch 176/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1741e-04 - val_loss: 7.2120e-04
Epoch 177/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5205e-04 - val_loss: 7.3901e-04
Epoch 178/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9450e-04 - val_loss: 6.7237e-04
Epoch 179/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8179e-04 - val_loss: 6.3819e-04
Epoch 180/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9290e-04 - val_loss: 7.4081e-04
Epoch 181/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8238e-04 - val_loss: 6.5104e-04
Epoch 182/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2102e-04 - val_loss: 5.5339e-04
Epoch 183/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0095e-04 - val_loss: 6.3186e-04
Epoch 184/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9975e-04 - val_loss: 8.6658e-04
Epoch 185/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.7654e-04 - val_loss: 6.2222e-04
Epoch 186/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6310e-04 - val_loss: 5.8415e-04
Epoch 187/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6332e-04 - val_loss: 7.1905e-04
Epoch 188/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6210e-04 - val_loss: 6.0330e-04
Epoch 189/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7812e-04 - val_loss: 6.8526e-04
Epoch 190/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7840e-04 - val_loss: 5.8297e-04
Epoch 191/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2878e-04 - val_loss: 7.7418e-04
Epoch 192/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6644e-04 - val_loss: 8.0805e-04
Epoch 193/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2261e-04 - val_loss: 7.9301e-04
Epoch 194/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6127e-04 - val_loss: 6.0545e-04
Epoch 195/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3666e-04 - val_loss: 6.3215e-04
Epoch 196/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9440e-04 - val_loss: 6.4340e-04
Epoch 197/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7343e-04 - val_loss: 6.6484e-04
Epoch 198/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1070e-04 - val_loss: 6.2110e-04
Epoch 199/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5821e-04 - val_loss: 6.3372e-04
Epoch 200/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7101e-04 - val_loss: 7.1212e-04
Epoch 201/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7573e-04 - val_loss: 6.5161e-04
Epoch 202/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7364e-04 - val_loss: 6.4379e-04
Epoch 203/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6380e-04 - val_loss: 6.2808e-04
Epoch 204/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4241e-04 - val_loss: 5.5971e-04
Epoch 205/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4995e-04 - val_loss: 5.1126e-04
Epoch 206/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4588e-04 - val_loss: 5.8239e-04
Epoch 207/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8436e-04 - val_loss: 6.0886e-04
Epoch 208/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4654e-04 - val_loss: 6.6755e-04
Epoch 209/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3380e-04 - val_loss: 9.4601e-04
Epoch 210/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3934e-04 - val_loss: 5.6207e-04
Epoch 211/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2970e-04 - val_loss: 7.0898e-04
Epoch 212/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4958e-04 - val_loss: 6.0900e-04
Epoch 213/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5103e-04 - val_loss: 5.7410e-04
Epoch 214/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2824e-04 - val_loss: 6.0569e-04
Epoch 215/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9604e-04 - val_loss: 6.1802e-04
Epoch 216/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3406e-04 - val_loss: 5.5625e-04
Epoch 217/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4423e-04 - val_loss: 5.9513e-04
Epoch 218/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3183e-04 - val_loss: 5.8267e-04
Epoch 219/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5219e-04 - val_loss: 5.6824e-04
Epoch 220/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1714e-04 - val_loss: 5.6538e-04
Epoch 221/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1190e-04 - val_loss: 5.5344e-04
Epoch 222/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7923e-04 - val_loss: 5.8623e-04
Epoch 223/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3488e-04 - val_loss: 6.0623e-04
Epoch 224/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4986e-04 - val_loss: 6.4389e-04
Epoch 225/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2142e-04 - val_loss: 6.0555e-04
Epoch 226/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1057e-04 - val_loss: 6.7757e-04
Epoch 227/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4226e-04 - val_loss: 6.1769e-04
Epoch 228/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2462e-04 - val_loss: 6.3646e-04
Epoch 229/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2638e-04 - val_loss: 6.1089e-04
Epoch 230/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3111e-04 - val_loss: 5.6324e-04
Epoch 231/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1523e-04 - val_loss: 5.7918e-04
Epoch 232/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0316e-04 - val_loss: 8.3849e-04
Epoch 233/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8275e-04 - val_loss: 6.2812e-04
Epoch 234/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1720e-04 - val_loss: 5.1381e-04
Epoch 235/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0410e-04 - val_loss: 5.7679e-04
Epoch 236/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1984e-04 - val_loss: 6.0413e-04
Epoch 237/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0999e-04 - val_loss: 6.4859e-04
Epoch 238/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5691e-04 - val_loss: 5.6939e-04
Epoch 239/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0059e-04 - val_loss: 5.9564e-04
Epoch 240/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0454e-04 - val_loss: 5.6055e-04
Epoch 241/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3891e-04 - val_loss: 6.3725e-04
Epoch 242/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9878e-04 - val_loss: 5.4027e-04
Epoch 243/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0127e-04 - val_loss: 5.5738e-04
Epoch 244/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0877e-04 - val_loss: 5.4564e-04
Epoch 245/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1375e-04 - val_loss: 6.1089e-04
Epoch 246/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5973e-04 - val_loss: 5.9062e-04
Epoch 247/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8063e-04 - val_loss: 5.7842e-04
Epoch 248/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9683e-04 - val_loss: 7.4703e-04
Epoch 249/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7994e-04 - val_loss: 6.4007e-04
Epoch 250/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3995e-04 - val_loss: 6.1318e-04
Epoch 251/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0029e-04 - val_loss: 5.2995e-04
Epoch 252/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0295e-04 - val_loss: 5.7995e-04
Epoch 253/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3789e-04 - val_loss: 7.0696e-04
Epoch 254/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1478e-04 - val_loss: 5.7167e-04
Epoch 255/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6972e-04 - val_loss: 5.9152e-04
Epoch 256/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8520e-04 - val_loss: 5.8847e-04
Epoch 257/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0114e-04 - val_loss: 5.5512e-04
Epoch 258/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1062e-04 - val_loss: 6.2280e-04
Epoch 259/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9276e-04 - val_loss: 6.7448e-04
Epoch 260/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7448e-04 - val_loss: 5.6805e-04
Epoch 261/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9047e-04 - val_loss: 5.8786e-04
Epoch 262/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0776e-04 - val_loss: 6.5530e-04
Epoch 263/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7250e-04 - val_loss: 5.8028e-04
Epoch 264/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0467e-04 - val_loss: 5.4783e-04
Epoch 265/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7052e-04 - val_loss: 6.0766e-04
Epoch 266/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9229e-04 - val_loss: 5.8752e-04
Epoch 267/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7570e-04 - val_loss: 6.2228e-04
Epoch 268/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9663e-04 - val_loss: 7.2422e-04
Epoch 269/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8432e-04 - val_loss: 9.0012e-04
Epoch 270/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9471e-04 - val_loss: 8.3094e-04
Epoch 271/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8835e-04 - val_loss: 6.3980e-04
Epoch 272/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2729e-04 - val_loss: 7.8566e-04
Epoch 273/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8927e-04 - val_loss: 7.6212e-04
Epoch 274/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0473e-04 - val_loss: 6.3498e-04
Epoch 275/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6550e-04 - val_loss: 6.1174e-04
Epoch 276/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1309e-04 - val_loss: 6.6587e-04
Epoch 277/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4535e-04 - val_loss: 5.5619e-04
Epoch 278/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7628e-04 - val_loss: 5.9939e-04
Epoch 279/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0690e-04 - val_loss: 5.8296e-04
Epoch 280/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6070e-04 - val_loss: 6.2541e-04
Epoch 281/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0100e-04 - val_loss: 5.3988e-04
Epoch 282/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7560e-04 - val_loss: 5.3094e-04
Epoch 283/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8269e-04 - val_loss: 5.5939e-04
Epoch 284/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5713e-04 - val_loss: 5.3246e-04
Epoch 285/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7685e-04 - val_loss: 5.5732e-04
Epoch 286/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7091e-04 - val_loss: 7.7259e-04
Epoch 287/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0489e-04 - val_loss: 6.4923e-04
Epoch 288/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6339e-04 - val_loss: 6.5202e-04
Epoch 289/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6414e-04 - val_loss: 6.7040e-04
Epoch 290/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0681e-04 - val_loss: 5.9481e-04
Epoch 291/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7392e-04 - val_loss: 5.0982e-04
Epoch 292/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5806e-04 - val_loss: 5.8996e-04
Epoch 293/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5204e-04 - val_loss: 6.7361e-04
Epoch 294/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6858e-04 - val_loss: 6.5217e-04
Epoch 295/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9529e-04 - val_loss: 5.6629e-04
Epoch 296/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6636e-04 - val_loss: 7.2206e-04
Epoch 297/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6300e-04 - val_loss: 6.1129e-04
Epoch 298/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3278e-04 - val_loss: 5.7785e-04
Epoch 299/500
1074/1074 [==============================] - 3s 2ms/step - loss: 6.5822e-04 - val_loss: 4.9089e-04
Epoch 300/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8108e-04 - val_loss: 6.4710e-04
Epoch 301/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5023e-04 - val_loss: 5.8818e-04
Epoch 302/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6154e-04 - val_loss: 6.1223e-04
Epoch 303/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7870e-04 - val_loss: 5.6894e-04
Epoch 304/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6288e-04 - val_loss: 5.1943e-04
Epoch 305/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4989e-04 - val_loss: 6.7226e-04
Epoch 306/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4945e-04 - val_loss: 5.7030e-04
Epoch 307/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5374e-04 - val_loss: 5.2308e-04
Epoch 308/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5822e-04 - val_loss: 5.7472e-04
Epoch 309/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3686e-04 - val_loss: 5.7684e-04
Epoch 310/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4150e-04 - val_loss: 7.5977e-04
Epoch 311/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6120e-04 - val_loss: 6.3251e-04
Epoch 312/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4248e-04 - val_loss: 5.2160e-04
Epoch 313/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6576e-04 - val_loss: 6.6339e-04
Epoch 314/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8743e-04 - val_loss: 5.8690e-04
Epoch 315/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4079e-04 - val_loss: 5.8766e-04
Epoch 316/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4083e-04 - val_loss: 5.8767e-04
Epoch 317/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5810e-04 - val_loss: 6.6527e-04
Epoch 318/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2883e-04 - val_loss: 5.7276e-04
Epoch 319/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3822e-04 - val_loss: 5.3236e-04
Epoch 320/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3900e-04 - val_loss: 5.4789e-04
Epoch 321/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5062e-04 - val_loss: 6.1716e-04
Epoch 322/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5608e-04 - val_loss: 5.0492e-04
Epoch 323/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6092e-04 - val_loss: 5.9285e-04
Epoch 324/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4031e-04 - val_loss: 4.8469e-04
Epoch 325/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6973e-04 - val_loss: 7.1657e-04
Epoch 326/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7293e-04 - val_loss: 5.9098e-04
Epoch 327/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1195e-04 - val_loss: 5.4430e-04
Epoch 328/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1637e-04 - val_loss: 5.7470e-04
Epoch 329/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2907e-04 - val_loss: 6.2039e-04
Epoch 330/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4737e-04 - val_loss: 5.1588e-04
Epoch 331/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4430e-04 - val_loss: 6.1489e-04
Epoch 332/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2962e-04 - val_loss: 5.8143e-04
Epoch 333/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6306e-04 - val_loss: 5.2309e-04
Epoch 334/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2749e-04 - val_loss: 6.0349e-04
Epoch 335/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3752e-04 - val_loss: 6.9207e-04
Epoch 336/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2529e-04 - val_loss: 6.0828e-04
Epoch 337/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3957e-04 - val_loss: 6.7637e-04
Epoch 338/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3731e-04 - val_loss: 5.7826e-04
Epoch 339/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1992e-04 - val_loss: 5.6473e-04
Epoch 340/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2535e-04 - val_loss: 5.5362e-04
Epoch 341/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2567e-04 - val_loss: 5.4226e-04
Epoch 342/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3055e-04 - val_loss: 5.5421e-04
Epoch 343/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2341e-04 - val_loss: 6.4528e-04
Epoch 344/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4119e-04 - val_loss: 6.2280e-04
Epoch 345/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2183e-04 - val_loss: 5.6666e-04
Epoch 346/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4152e-04 - val_loss: 5.9902e-04
Epoch 347/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1553e-04 - val_loss: 5.1870e-04
Epoch 348/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2275e-04 - val_loss: 7.7536e-04
Epoch 349/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0563e-04 - val_loss: 5.9300e-04
Epoch 350/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0919e-04 - val_loss: 5.2993e-04
Epoch 351/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0647e-04 - val_loss: 5.9053e-04
Epoch 352/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.5079e-04 - val_loss: 5.7013e-04
Epoch 353/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9688e-04 - val_loss: 4.9913e-04
Epoch 354/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4973e-04 - val_loss: 5.5876e-04
Epoch 355/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2196e-04 - val_loss: 7.7364e-04
Epoch 356/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1446e-04 - val_loss: 5.5787e-04
Epoch 357/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3789e-04 - val_loss: 5.5820e-04
Epoch 358/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0630e-04 - val_loss: 5.3328e-04
Epoch 359/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0979e-04 - val_loss: 6.1438e-04
Epoch 360/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1190e-04 - val_loss: 6.5200e-04
Epoch 361/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1007e-04 - val_loss: 5.6076e-04
Epoch 362/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0943e-04 - val_loss: 6.1246e-04
Epoch 363/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3459e-04 - val_loss: 5.5772e-04
Epoch 364/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1031e-04 - val_loss: 5.7409e-04
Epoch 365/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0734e-04 - val_loss: 6.1289e-04
Epoch 366/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2801e-04 - val_loss: 5.3621e-04
Epoch 367/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3430e-04 - val_loss: 5.8694e-04
Epoch 368/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9963e-04 - val_loss: 5.5738e-04
Epoch 369/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9645e-04 - val_loss: 5.4006e-04
Epoch 370/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0965e-04 - val_loss: 5.2797e-04
Epoch 371/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0272e-04 - val_loss: 5.7619e-04
Epoch 372/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1963e-04 - val_loss: 6.1672e-04
Epoch 373/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9677e-04 - val_loss: 5.3871e-04
Epoch 374/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1617e-04 - val_loss: 6.3777e-04
Epoch 375/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.4317e-04 - val_loss: 6.1509e-04
Epoch 376/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1126e-04 - val_loss: 5.5265e-04
Epoch 377/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.2445e-04 - val_loss: 5.6547e-04
Epoch 378/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0495e-04 - val_loss: 6.1526e-04
Epoch 379/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9418e-04 - val_loss: 5.4673e-04
Epoch 380/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1114e-04 - val_loss: 5.3221e-04
Epoch 381/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0922e-04 - val_loss: 5.7583e-04
Epoch 382/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9069e-04 - val_loss: 5.7838e-04
Epoch 383/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0779e-04 - val_loss: 6.2017e-04
Epoch 384/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1684e-04 - val_loss: 5.6757e-04
Epoch 385/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9037e-04 - val_loss: 5.4908e-04
Epoch 386/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3352e-04 - val_loss: 6.1264e-04
Epoch 387/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8155e-04 - val_loss: 5.5439e-04
Epoch 388/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9480e-04 - val_loss: 6.6723e-04
Epoch 389/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1247e-04 - val_loss: 5.6602e-04
Epoch 390/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0164e-04 - val_loss: 5.7452e-04
Epoch 391/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0459e-04 - val_loss: 6.0596e-04
Epoch 392/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1084e-04 - val_loss: 5.6829e-04
Epoch 393/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1003e-04 - val_loss: 5.6119e-04
Epoch 394/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8535e-04 - val_loss: 5.3106e-04
Epoch 395/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9104e-04 - val_loss: 5.9015e-04
Epoch 396/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9713e-04 - val_loss: 5.3543e-04
Epoch 397/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9680e-04 - val_loss: 5.4110e-04
Epoch 398/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0414e-04 - val_loss: 5.4438e-04
Epoch 399/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9522e-04 - val_loss: 6.0799e-04
Epoch 400/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8713e-04 - val_loss: 5.8916e-04
Epoch 401/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1988e-04 - val_loss: 6.2498e-04
Epoch 402/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9730e-04 - val_loss: 5.5622e-04
Epoch 403/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8838e-04 - val_loss: 5.5725e-04
Epoch 404/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0538e-04 - val_loss: 4.8206e-04
Epoch 405/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8430e-04 - val_loss: 5.9621e-04
Epoch 406/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8391e-04 - val_loss: 5.7501e-04
Epoch 407/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9877e-04 - val_loss: 7.9706e-04
Epoch 408/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1899e-04 - val_loss: 5.3953e-04
Epoch 409/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9819e-04 - val_loss: 5.4224e-04
Epoch 410/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8581e-04 - val_loss: 5.3214e-04
Epoch 411/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0083e-04 - val_loss: 5.8764e-04
Epoch 412/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9258e-04 - val_loss: 5.9242e-04
Epoch 413/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7955e-04 - val_loss: 6.0915e-04
Epoch 414/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9968e-04 - val_loss: 5.3904e-04
Epoch 415/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7346e-04 - val_loss: 5.6532e-04
Epoch 416/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7626e-04 - val_loss: 5.4747e-04
Epoch 417/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.3495e-04 - val_loss: 5.8494e-04
Epoch 418/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9429e-04 - val_loss: 6.1752e-04
Epoch 419/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8601e-04 - val_loss: 5.5216e-04
Epoch 420/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7975e-04 - val_loss: 5.3245e-04
Epoch 421/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6207e-04 - val_loss: 5.7257e-04
Epoch 422/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1225e-04 - val_loss: 5.5709e-04
Epoch 423/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9842e-04 - val_loss: 5.5384e-04
Epoch 424/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7661e-04 - val_loss: 5.0606e-04
Epoch 425/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0353e-04 - val_loss: 6.0282e-04
Epoch 426/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7489e-04 - val_loss: 5.8405e-04
Epoch 427/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0167e-04 - val_loss: 5.1783e-04
Epoch 428/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0435e-04 - val_loss: 5.5973e-04
Epoch 429/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9125e-04 - val_loss: 5.8030e-04
Epoch 430/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6986e-04 - val_loss: 5.1801e-04
Epoch 431/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7419e-04 - val_loss: 5.9931e-04
Epoch 432/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8954e-04 - val_loss: 5.5995e-04
Epoch 433/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7079e-04 - val_loss: 5.2146e-04
Epoch 434/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7921e-04 - val_loss: 4.6263e-04
Epoch 435/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7762e-04 - val_loss: 5.5900e-04
Epoch 436/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.0466e-04 - val_loss: 5.5515e-04
Epoch 437/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9761e-04 - val_loss: 4.9437e-04
Epoch 438/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7301e-04 - val_loss: 5.9840e-04
Epoch 439/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7462e-04 - val_loss: 5.3206e-04
Epoch 440/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9600e-04 - val_loss: 6.0215e-04
Epoch 441/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7195e-04 - val_loss: 6.1274e-04
Epoch 442/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6381e-04 - val_loss: 5.5510e-04
Epoch 443/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8892e-04 - val_loss: 6.0322e-04
Epoch 444/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8829e-04 - val_loss: 5.9181e-04
Epoch 445/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6562e-04 - val_loss: 5.4078e-04
Epoch 446/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8769e-04 - val_loss: 5.8781e-04
Epoch 447/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7180e-04 - val_loss: 5.2302e-04
Epoch 448/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5556e-04 - val_loss: 5.2200e-04
Epoch 449/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7789e-04 - val_loss: 5.0799e-04
Epoch 450/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7633e-04 - val_loss: 5.9445e-04
Epoch 451/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1315e-04 - val_loss: 6.3011e-04
Epoch 452/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8115e-04 - val_loss: 5.2321e-04
Epoch 453/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5301e-04 - val_loss: 5.1971e-04
Epoch 454/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6338e-04 - val_loss: 5.1260e-04
Epoch 455/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7812e-04 - val_loss: 6.0388e-04
Epoch 456/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6317e-04 - val_loss: 5.0112e-04
Epoch 457/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8009e-04 - val_loss: 5.5111e-04
Epoch 458/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7199e-04 - val_loss: 5.3002e-04
Epoch 459/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8781e-04 - val_loss: 5.2214e-04
Epoch 460/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5723e-04 - val_loss: 5.1196e-04
Epoch 461/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9852e-04 - val_loss: 5.3350e-04
Epoch 462/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6923e-04 - val_loss: 5.8690e-04
Epoch 463/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5103e-04 - val_loss: 5.3708e-04
Epoch 464/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.9069e-04 - val_loss: 5.6246e-04
Epoch 465/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7193e-04 - val_loss: 5.2243e-04
Epoch 466/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6686e-04 - val_loss: 5.8465e-04
Epoch 467/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.4802e-04 - val_loss: 5.6523e-04
Epoch 468/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.1517e-04 - val_loss: 5.3446e-04
Epoch 469/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7869e-04 - val_loss: 5.4823e-04
Epoch 470/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5652e-04 - val_loss: 5.0452e-04
Epoch 471/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6242e-04 - val_loss: 5.0743e-04
Epoch 472/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7703e-04 - val_loss: 4.7824e-04
Epoch 473/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7488e-04 - val_loss: 4.9761e-04
Epoch 474/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6399e-04 - val_loss: 5.3500e-04
Epoch 475/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7232e-04 - val_loss: 4.8843e-04
Epoch 476/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.4830e-04 - val_loss: 4.9195e-04
Epoch 477/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5796e-04 - val_loss: 5.4309e-04
Epoch 478/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5046e-04 - val_loss: 5.8127e-04
Epoch 479/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7416e-04 - val_loss: 5.3115e-04
Epoch 480/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6509e-04 - val_loss: 5.2855e-04
Epoch 481/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6773e-04 - val_loss: 5.2116e-04
Epoch 482/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6488e-04 - val_loss: 4.8547e-04
Epoch 483/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6569e-04 - val_loss: 4.8638e-04
Epoch 484/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5454e-04 - val_loss: 5.3036e-04
Epoch 485/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5842e-04 - val_loss: 5.1590e-04
Epoch 486/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6235e-04 - val_loss: 5.3239e-04
Epoch 487/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8373e-04 - val_loss: 5.0298e-04
Epoch 488/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5780e-04 - val_loss: 5.1987e-04
Epoch 489/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6173e-04 - val_loss: 5.6741e-04
Epoch 490/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.8093e-04 - val_loss: 5.7924e-04
Epoch 491/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5690e-04 - val_loss: 5.8969e-04
Epoch 492/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5856e-04 - val_loss: 4.9942e-04
Epoch 493/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.4678e-04 - val_loss: 5.0822e-04
Epoch 494/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.7400e-04 - val_loss: 5.4361e-04
Epoch 495/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5840e-04 - val_loss: 5.9919e-04
Epoch 496/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5135e-04 - val_loss: 4.9544e-04
Epoch 497/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.6334e-04 - val_loss: 5.4150e-04
Epoch 498/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.4739e-04 - val_loss: 6.5828e-04
Epoch 499/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.5710e-04 - val_loss: 5.1126e-04
Epoch 500/500
1074/1074 [==============================] - 2s 2ms/step - loss: 5.4162e-04 - val_loss: 5.5003e-04
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense_6 (Dense) (None, 100) 3700
dropout_4 (Dropout) (None, 100) 0
dense_7 (Dense) (None, 100) 10100
dropout_5 (Dropout) (None, 100) 0
dense_8 (Dense) (None, 100) 10100
dropout_6 (Dropout) (None, 100) 0
dense_9 (Dense) (None, 100) 10100
dropout_7 (Dropout) (None, 100) 0
dense_10 (Dense) (None, 100) 10100
dense_11 (Dense) (None, 6) 606
=================================================================
Total params: 44,706
Trainable params: 44,706
Non-trainable params: 0
_________________________________________________________________
CPU times: user 28min 24s, sys: 14min 28s, total: 42min 53s
Wall time: 15min 11s
# Persist the trained 36-feature DNN, tagged with the run timestamp so repeated
# runs do not overwrite each other.
dnn_model_36.save(output_dir/"{}_{}.h5".format(dnn_model_36_tag, timestamp))
# Training/validation loss curves for this model, saved as a PDF.
plot_loss(history_dnn_36, 'loss_{}.pdf'.format(dnn_model_36_tag), 'DNN model (predict 6 forces from 36 input features)')
# save model loss on test set for evaluation section below
test_results['dnn_36'] = dnn_model_36.evaluate(X_test_36, Y_test, verbose=0)
# Predicted forces on the held-out test set; used for the pred-vs-true scatter
# plots (one panel per force component, two sensors x three axes).
Y_test_pred_dnn_36 = dnn_model_36.predict(X_test_36)
plot_pred_vs_true(Y_test_pred_dnn_36, Y_test, 'pred_vs_true_{}'.format(dnn_model_36_tag), titles = ['$f_{x_1}$','$f_{y_1}$','$f_{z_1}$','$f_{x_2}$','$f_{y_2}$','$f_{z_2}$'])
Up to 6th order derivatives for 12 input features
# Fresh DNN with the same architecture as before, now trained on the 84-feature
# input set; the output width is taken from the force-target matrix Y_train.
dnn_model_84 = setup_dnn_model(Y_train.shape[-1])
dnn_model_84_tag = "{}_84features".format(dnn_tag)
%%time
# Early stopping is defined but currently disabled (see the callbacks list below,
# where the early_stop entry is commented out) so the full 500 epochs run.
early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=20)
# NOTE(review): without save_best_only=True, ModelCheckpoint overwrites the file
# every epoch regardless of val_loss, so the monitor argument has no effect here
# — confirm whether best-only checkpointing was intended.
save_every_epoch = tf.keras.callbacks.ModelCheckpoint(output_dir/'dnn_84_tmp.h5', monitor='val_loss', save_freq='epoch')
# Train on the 84-feature inputs; validation data is used only for val_loss
# reporting and checkpointing, not for weight updates.
history_dnn_84 = dnn_model_84.fit(
X_train, Y_train,
validation_data=(X_val, Y_val),
batch_size = dnn_batch_size,
epochs=dnn_epochs,
callbacks=[save_every_epoch]
#callbacks=[early_stop, save_every_epoch]
#verbose=0,
)
dnn_model_84.summary()
# Persist the per-epoch loss history so the curves can be re-plotted later
# without retraining.
with open(output_dir/'history_dnn_84.pickle', 'wb') as f:
pickle.dump(history_dnn_84.history, f)
Epoch 1/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0245 - val_loss: 0.0106
Epoch 2/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0110 - val_loss: 0.0089
Epoch 3/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0087 - val_loss: 0.0075
Epoch 4/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0077 - val_loss: 0.0057
Epoch 5/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0065 - val_loss: 0.0072
Epoch 6/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0056 - val_loss: 0.0057
Epoch 7/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0052 - val_loss: 0.0036
Epoch 8/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0047 - val_loss: 0.0034
Epoch 9/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0039 - val_loss: 0.0034
Epoch 10/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0040 - val_loss: 0.0024
Epoch 11/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0039 - val_loss: 0.0019
Epoch 12/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0033 - val_loss: 0.0028
Epoch 13/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0033 - val_loss: 0.0019
Epoch 14/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0036 - val_loss: 0.0018
Epoch 15/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0031 - val_loss: 0.0022
Epoch 16/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0028 - val_loss: 0.0019
Epoch 17/500
1074/1074 [==============================] - 2s 1ms/step - loss: 0.0037 - val_loss: 0.0029
Epoch 18/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0028 - val_loss: 0.0019
Epoch 19/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0029 - val_loss: 0.0017
Epoch 20/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0027 - val_loss: 0.0017
Epoch 21/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0030 - val_loss: 0.0038
Epoch 22/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0026 - val_loss: 0.0017
Epoch 23/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0025 - val_loss: 0.0022
Epoch 24/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0025 - val_loss: 0.0016
Epoch 25/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0026 - val_loss: 0.0013
Epoch 26/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0024 - val_loss: 0.0013
Epoch 27/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0025 - val_loss: 0.0021
Epoch 28/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0023 - val_loss: 0.0014
Epoch 29/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0024 - val_loss: 0.0016
Epoch 30/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0026 - val_loss: 0.0013
Epoch 31/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0023 - val_loss: 0.0012
Epoch 32/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0014
Epoch 33/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0024 - val_loss: 0.0025
Epoch 34/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0018
Epoch 35/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0023 - val_loss: 0.0012
Epoch 36/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0021 - val_loss: 0.0017
Epoch 37/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0014
Epoch 38/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0036
Epoch 39/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0022 - val_loss: 0.0013
Epoch 40/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0011
Epoch 41/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0015
Epoch 42/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0048
Epoch 43/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0024
Epoch 44/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0021 - val_loss: 0.0013
Epoch 45/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0010
Epoch 46/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0020 - val_loss: 0.0013
Epoch 47/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0012
Epoch 48/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0016
Epoch 49/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0019
Epoch 50/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0012
Epoch 51/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0033
Epoch 52/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0011
Epoch 53/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0012
Epoch 54/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0019 - val_loss: 0.0011
Epoch 55/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 56/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0011
Epoch 57/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0018 - val_loss: 0.0013
Epoch 58/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 59/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 60/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0012
Epoch 61/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0013
Epoch 62/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 0.0015
Epoch 63/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 64/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012 - ETA: 0
Epoch 65/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 9.9660e-04
Epoch 66/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0016
Epoch 67/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 68/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0014
Epoch 69/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0016
Epoch 70/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0012
Epoch 71/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 72/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0013
Epoch 73/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0016 - val_loss: 0.0011
Epoch 74/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0016
Epoch 75/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0011
Epoch 76/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0017 - val_loss: 9.5424e-04
Epoch 77/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0015 - val_loss: 0.0013
Epoch 78/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0015 - val_loss: 9.7472e-04
Epoch 79/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0016 - val_loss: 9.1790e-04
Epoch 80/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0014 - val_loss: 9.6187e-04
Epoch 81/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0015 - val_loss: 9.9752e-04
Epoch 82/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0014
Epoch 83/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0010
Epoch 84/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 85/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 9.5048e-04
Epoch 86/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 8.8909e-04
Epoch 87/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 8.4389e-04
Epoch 88/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 9.7432e-04
Epoch 89/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0011
Epoch 90/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.6603e-04
Epoch 91/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0015
Epoch 92/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0010
Epoch 93/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 9.3157e-04
Epoch 94/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 8.4812e-04
Epoch 95/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.6305e-04
Epoch 96/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0010
Epoch 97/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0015
Epoch 98/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0015 - val_loss: 0.0011
Epoch 99/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 9.3801e-04
Epoch 100/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.4961e-04
Epoch 101/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0017
Epoch 102/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.5732e-04
Epoch 103/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.6248e-04
Epoch 104/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 0.0014
Epoch 105/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 8.8579e-04
Epoch 106/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.1401e-04
Epoch 107/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 9.4915e-04
Epoch 108/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 109/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.0263e-04
Epoch 110/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.0610e-04
Epoch 111/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0014
Epoch 112/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0015
Epoch 113/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 8.9982e-04
Epoch 114/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0012
Epoch 115/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 116/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 117/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.1361e-04
Epoch 118/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 7.9108e-04
Epoch 119/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.3958e-04
Epoch 120/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.9808e-04
Epoch 121/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0013 - val_loss: 8.1967e-04
Epoch 122/500
1074/1074 [==============================] - 3s 3ms/step - loss: 0.0013 - val_loss: 8.1049e-04
Epoch 123/500
1074/1074 [==============================] - 3s 2ms/step - loss: 0.0012 - val_loss: 8.5540e-04
Epoch 124/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 8.9034e-04
Epoch 125/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 7.4769e-04
Epoch 126/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.2225e-04
Epoch 127/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0014
Epoch 128/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0014
Epoch 129/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.4949e-04
Epoch 130/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 8.4986e-04
Epoch 131/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 0.0010
Epoch 132/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.2797e-04
Epoch 133/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.4812e-04
Epoch 134/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 135/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0014 - val_loss: 8.4055e-04
Epoch 136/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 137/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 138/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0011
Epoch 139/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 140/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 141/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.0539e-04
Epoch 142/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.5211e-04
Epoch 143/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0013
Epoch 144/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0010
Epoch 145/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0018
Epoch 146/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.5037e-04
Epoch 147/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.5698e-04
Epoch 148/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 149/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.7484e-04
Epoch 150/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.1723e-04
Epoch 151/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.6429e-04
Epoch 152/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.7197e-04
Epoch 153/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.3164e-04
Epoch 154/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.3589e-04
Epoch 155/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.1433e-04
Epoch 156/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 157/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.8705e-04
Epoch 158/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.6255e-04
Epoch 159/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 8.8752e-04
Epoch 160/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.1998e-04
Epoch 161/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.0308e-04
Epoch 162/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.9719e-04
Epoch 163/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0014
Epoch 164/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 165/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.1732e-04
Epoch 166/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0010
Epoch 167/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.2393e-04
Epoch 168/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0022
Epoch 169/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0013 - val_loss: 9.0364e-04
Epoch 170/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.3197e-04
Epoch 171/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 8.7414e-04
Epoch 172/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0012
Epoch 173/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.3291e-04
Epoch 174/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.3800e-04
Epoch 175/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 9.3254e-04
Epoch 176/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 0.0014
Epoch 177/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.8201e-04
Epoch 178/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 179/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.5759e-04
Epoch 180/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.9984e-04 - val_loss: 8.5059e-04
Epoch 181/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 9.6879e-04
Epoch 182/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.7227e-04
Epoch 183/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 184/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.8118e-04
Epoch 185/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.4533e-04
Epoch 186/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 187/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 188/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.4998e-04
Epoch 189/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.5733e-04 - val_loss: 0.0013
Epoch 190/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.1797e-04
Epoch 191/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.2276e-04
Epoch 192/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.5307e-04
Epoch 193/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 9.4404e-04
Epoch 194/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7070e-04 - val_loss: 6.8021e-04
Epoch 195/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7429e-04 - val_loss: 9.3126e-04
Epoch 196/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 8.0875e-04
Epoch 197/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 9.9724e-04
Epoch 198/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0011
Epoch 199/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0013
Epoch 200/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 201/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7020e-04 - val_loss: 8.2843e-04
Epoch 202/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7007e-04 - val_loss: 7.5611e-04
Epoch 203/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0010
Epoch 204/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 7.0378e-04
Epoch 205/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.6623e-04 - val_loss: 7.5058e-04
Epoch 206/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 6.7804e-04
Epoch 207/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4136e-04 - val_loss: 8.5606e-04
Epoch 208/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7626e-04 - val_loss: 9.3784e-04
Epoch 209/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 9.5720e-04
Epoch 210/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 0.0013
Epoch 211/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.6878e-04 - val_loss: 8.7612e-04
Epoch 212/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7571e-04 - val_loss: 7.4601e-04
Epoch 213/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 8.2084e-04
Epoch 214/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0119e-04 - val_loss: 6.9488e-04
Epoch 215/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0010 - val_loss: 6.9434e-04
Epoch 216/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.6722e-04 - val_loss: 7.4542e-04
Epoch 217/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0012 - val_loss: 9.4265e-04
Epoch 218/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.6004e-04 - val_loss: 7.7586e-04
Epoch 219/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0185e-04 - val_loss: 6.7527e-04
Epoch 220/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.6254e-04 - val_loss: 9.7472e-04
Epoch 221/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.3721e-04 - val_loss: 7.0419e-04
Epoch 222/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0636e-04 - val_loss: 8.4985e-04
Epoch 223/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.5719e-04 - val_loss: 6.7854e-04
Epoch 224/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.8564e-04 - val_loss: 9.9351e-04
Epoch 225/500
1074/1074 [==============================] - 2s 2ms/step - loss: 0.0011 - val_loss: 7.6733e-04
Epoch 226/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7393e-04 - val_loss: 7.1017e-04
Epoch 227/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4566e-04 - val_loss: 6.9679e-04
Epoch 228/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.9091e-04 - val_loss: 0.0010
Epoch 229/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4649e-04 - val_loss: 9.1745e-04
Epoch 230/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1416e-04 - val_loss: 8.0509e-04
Epoch 231/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1248e-04 - val_loss: 7.2629e-04
Epoch 232/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7242e-04 - val_loss: 7.8745e-04
Epoch 233/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.3119e-04 - val_loss: 7.2930e-04
Epoch 234/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4020e-04 - val_loss: 7.0854e-04
Epoch 235/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4825e-04 - val_loss: 7.1684e-04
Epoch 236/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4645e-04 - val_loss: 8.2225e-04
Epoch 237/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9386e-04 - val_loss: 9.1675e-04
Epoch 238/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0581e-04 - val_loss: 7.6986e-04
Epoch 239/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.9322e-04 - val_loss: 8.2604e-04
Epoch 240/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0329e-04 - val_loss: 0.0011
Epoch 241/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2249e-04 - val_loss: 7.8341e-04
Epoch 242/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4370e-04 - val_loss: 7.8975e-04
Epoch 243/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7713e-04 - val_loss: 7.2495e-04
Epoch 244/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2281e-04 - val_loss: 8.1582e-04
Epoch 245/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0169e-04 - val_loss: 6.5860e-04
Epoch 246/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2503e-04 - val_loss: 6.9158e-04
Epoch 247/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.4122e-04 - val_loss: 7.7677e-04
Epoch 248/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.7534e-04 - val_loss: 6.3383e-04
Epoch 249/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8871e-04 - val_loss: 7.3341e-04
Epoch 250/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8401e-04 - val_loss: 6.4567e-04
Epoch 251/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.6718e-04 - val_loss: 0.0011
Epoch 252/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9811e-04 - val_loss: 8.0880e-04
Epoch 253/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8767e-04 - val_loss: 0.0010
Epoch 254/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.7683e-04 - val_loss: 8.0637e-04
Epoch 255/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2435e-04 - val_loss: 8.6060e-04
Epoch 256/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5404e-04 - val_loss: 6.6405e-04
Epoch 257/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0979e-04 - val_loss: 7.4363e-04
Epoch 258/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0620e-04 - val_loss: 7.1416e-04
Epoch 259/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9797e-04 - val_loss: 7.5567e-04
Epoch 260/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1698e-04 - val_loss: 7.4487e-04
Epoch 261/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4478e-04 - val_loss: 6.8338e-04
Epoch 262/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4794e-04 - val_loss: 0.0012
Epoch 263/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8313e-04 - val_loss: 7.3974e-04
Epoch 264/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.3297e-04 - val_loss: 7.8506e-04
Epoch 265/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.7627e-04 - val_loss: 6.8326e-04
Epoch 266/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9488e-04 - val_loss: 7.7198e-04
Epoch 267/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8197e-04 - val_loss: 7.5985e-04
Epoch 268/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6125e-04 - val_loss: 8.8978e-04
Epoch 269/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0321e-04 - val_loss: 7.1386e-04
Epoch 270/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8126e-04 - val_loss: 7.6151e-04
Epoch 271/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9394e-04 - val_loss: 8.9144e-04
Epoch 272/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0314e-04 - val_loss: 9.7705e-04
Epoch 273/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.3656e-04 - val_loss: 8.6294e-04
Epoch 274/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6116e-04 - val_loss: 0.0011
Epoch 275/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1465e-04 - val_loss: 0.0013
Epoch 276/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9701e-04 - val_loss: 8.9064e-04
Epoch 277/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3774e-04 - val_loss: 7.8071e-04
Epoch 278/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5578e-04 - val_loss: 7.8539e-04
Epoch 279/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8841e-04 - val_loss: 7.5029e-04
Epoch 280/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.7433e-04 - val_loss: 7.5356e-04
Epoch 281/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.7472e-04 - val_loss: 0.0011
Epoch 282/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2774e-04 - val_loss: 7.3087e-04
Epoch 283/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6950e-04 - val_loss: 9.0796e-04
Epoch 284/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3109e-04 - val_loss: 7.2503e-04
Epoch 285/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2160e-04 - val_loss: 7.6480e-04
Epoch 286/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.8345e-04 - val_loss: 6.8993e-04
Epoch 287/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5913e-04 - val_loss: 7.9052e-04
Epoch 288/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5429e-04 - val_loss: 6.5362e-04
Epoch 289/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4613e-04 - val_loss: 7.0365e-04
Epoch 290/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3082e-04 - val_loss: 7.2302e-04
Epoch 291/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3071e-04 - val_loss: 9.1907e-04
Epoch 292/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.0276e-04 - val_loss: 9.8355e-04
Epoch 293/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1388e-04 - val_loss: 7.5674e-04
Epoch 294/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4136e-04 - val_loss: 6.9019e-04
Epoch 295/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2362e-04 - val_loss: 7.4060e-04
Epoch 296/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.1283e-04 - val_loss: 7.2887e-04
Epoch 297/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1731e-04 - val_loss: 7.4535e-04
Epoch 298/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4279e-04 - val_loss: 7.6663e-04
Epoch 299/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1630e-04 - val_loss: 6.2784e-04
Epoch 300/500
1074/1074 [==============================] - 2s 2ms/step - loss: 9.2037e-04 - val_loss: 8.4216e-04
Epoch 301/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.9085e-04 - val_loss: 6.9626e-04
Epoch 302/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0222e-04 - val_loss: 6.6423e-04
Epoch 303/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5738e-04 - val_loss: 7.2213e-04
Epoch 304/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2896e-04 - val_loss: 7.2230e-04
Epoch 305/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4730e-04 - val_loss: 7.0293e-04
Epoch 306/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3486e-04 - val_loss: 6.4003e-04
Epoch 307/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9807e-04 - val_loss: 7.1614e-04
Epoch 308/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6831e-04 - val_loss: 7.3468e-04
Epoch 309/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.7875e-04 - val_loss: 7.8371e-04
Epoch 310/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3531e-04 - val_loss: 6.7544e-04
Epoch 311/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.3379e-04 - val_loss: 7.3456e-04
Epoch 312/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7865e-04 - val_loss: 9.2292e-04
Epoch 313/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1063e-04 - val_loss: 7.8630e-04
Epoch 314/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1610e-04 - val_loss: 6.8046e-04
Epoch 315/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1516e-04 - val_loss: 7.5639e-04
Epoch 316/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4275e-04 - val_loss: 6.7311e-04
Epoch 317/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4068e-04 - val_loss: 7.6702e-04
Epoch 318/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5545e-04 - val_loss: 8.7234e-04
Epoch 319/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2370e-04 - val_loss: 6.6684e-04
Epoch 320/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1376e-04 - val_loss: 7.3074e-04
Epoch 321/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4949e-04 - val_loss: 8.0011e-04
Epoch 322/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1932e-04 - val_loss: 7.1415e-04
Epoch 323/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1683e-04 - val_loss: 7.5557e-04
Epoch 324/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9731e-04 - val_loss: 7.8037e-04
Epoch 325/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2490e-04 - val_loss: 7.9167e-04
Epoch 326/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5766e-04 - val_loss: 6.9855e-04
Epoch 327/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0699e-04 - val_loss: 6.9156e-04
Epoch 328/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8438e-04 - val_loss: 7.3986e-04
Epoch 329/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8614e-04 - val_loss: 7.9486e-04
Epoch 330/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5184e-04 - val_loss: 7.5276e-04
Epoch 331/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2627e-04 - val_loss: 7.1069e-04
Epoch 332/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0542e-04 - val_loss: 7.2302e-04
Epoch 333/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7546e-04 - val_loss: 7.4955e-04
Epoch 334/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1129e-04 - val_loss: 7.7743e-04
Epoch 335/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7941e-04 - val_loss: 7.9673e-04
Epoch 336/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1180e-04 - val_loss: 6.6830e-04
Epoch 337/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.6886e-04 - val_loss: 6.8743e-04
Epoch 338/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8196e-04 - val_loss: 6.6058e-04
Epoch 339/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8135e-04 - val_loss: 7.8075e-04
Epoch 340/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7812e-04 - val_loss: 7.2537e-04
Epoch 341/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1044e-04 - val_loss: 9.4620e-04
Epoch 342/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9133e-04 - val_loss: 7.6935e-04
Epoch 343/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9855e-04 - val_loss: 8.3168e-04
Epoch 344/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7496e-04 - val_loss: 8.1058e-04
Epoch 345/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1758e-04 - val_loss: 6.8706e-04
Epoch 346/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.5500e-04 - val_loss: 6.3634e-04
Epoch 347/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2413e-04 - val_loss: 6.8993e-04
Epoch 348/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3081e-04 - val_loss: 7.4250e-04
Epoch 349/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1699e-04 - val_loss: 8.0139e-04
Epoch 350/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7883e-04 - val_loss: 7.1052e-04
Epoch 351/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6932e-04 - val_loss: 7.0511e-04
Epoch 352/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8046e-04 - val_loss: 7.3479e-04
Epoch 353/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0440e-04 - val_loss: 7.7501e-04
Epoch 354/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.1099e-04 - val_loss: 7.0302e-04
Epoch 355/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6999e-04 - val_loss: 6.8641e-04
Epoch 356/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0790e-04 - val_loss: 7.8057e-04
Epoch 357/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0428e-04 - val_loss: 6.9875e-04
Epoch 358/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.2980e-04 - val_loss: 7.0856e-04
Epoch 359/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4145e-04 - val_loss: 7.5821e-04
Epoch 360/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7281e-04 - val_loss: 7.5917e-04
Epoch 361/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8419e-04 - val_loss: 8.0777e-04
Epoch 362/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6404e-04 - val_loss: 6.7034e-04
Epoch 363/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6515e-04 - val_loss: 6.8405e-04
Epoch 364/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.4578e-04 - val_loss: 6.8902e-04
Epoch 365/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6152e-04 - val_loss: 6.5786e-04
Epoch 366/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2761e-04 - val_loss: 6.5565e-04
Epoch 367/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6412e-04 - val_loss: 6.8011e-04
Epoch 368/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8597e-04 - val_loss: 6.7201e-04
Epoch 369/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7303e-04 - val_loss: 7.3073e-04
Epoch 370/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7160e-04 - val_loss: 6.8361e-04
Epoch 371/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5993e-04 - val_loss: 6.3589e-04
Epoch 372/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6108e-04 - val_loss: 6.5222e-04
Epoch 373/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5799e-04 - val_loss: 7.6768e-04
Epoch 374/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9321e-04 - val_loss: 7.5737e-04
Epoch 375/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6425e-04 - val_loss: 9.7197e-04
Epoch 376/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8723e-04 - val_loss: 7.1215e-04
Epoch 377/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3708e-04 - val_loss: 6.7711e-04
Epoch 378/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8556e-04 - val_loss: 7.7013e-04
Epoch 379/500
1074/1074 [==============================] - 2s 2ms/step - loss: 8.0142e-04 - val_loss: 7.3392e-04
Epoch 380/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3534e-04 - val_loss: 7.6086e-04
Epoch 381/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6637e-04 - val_loss: 6.7208e-04
Epoch 382/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9425e-04 - val_loss: 6.7973e-04
Epoch 383/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4321e-04 - val_loss: 6.4698e-04
Epoch 384/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5758e-04 - val_loss: 7.4642e-04
Epoch 385/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8439e-04 - val_loss: 7.2850e-04
Epoch 386/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6351e-04 - val_loss: 7.8788e-04
Epoch 387/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6339e-04 - val_loss: 7.2957e-04
Epoch 388/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5835e-04 - val_loss: 6.0994e-04
Epoch 389/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4539e-04 - val_loss: 6.5578e-04
Epoch 390/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6102e-04 - val_loss: 7.0919e-04
Epoch 391/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6701e-04 - val_loss: 7.6121e-04
Epoch 392/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5860e-04 - val_loss: 6.0709e-04
Epoch 393/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5120e-04 - val_loss: 6.2790e-04
Epoch 394/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5025e-04 - val_loss: 6.8896e-04
Epoch 395/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6527e-04 - val_loss: 7.4456e-04
Epoch 396/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4924e-04 - val_loss: 7.1368e-04
Epoch 397/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7000e-04 - val_loss: 7.5588e-04
Epoch 398/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2673e-04 - val_loss: 7.0482e-04
Epoch 399/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6031e-04 - val_loss: 6.7974e-04
Epoch 400/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6953e-04 - val_loss: 6.0348e-04
Epoch 401/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2292e-04 - val_loss: 7.0400e-04
Epoch 402/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4341e-04 - val_loss: 6.6618e-04
Epoch 403/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.9491e-04 - val_loss: 7.5271e-04
Epoch 404/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3156e-04 - val_loss: 7.4257e-04
Epoch 405/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5601e-04 - val_loss: 6.5309e-04
Epoch 406/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4917e-04 - val_loss: 7.1782e-04
Epoch 407/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4437e-04 - val_loss: 6.6563e-04
Epoch 408/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4830e-04 - val_loss: 6.3906e-04
Epoch 409/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1983e-04 - val_loss: 7.6780e-04
Epoch 410/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5506e-04 - val_loss: 6.5099e-04
Epoch 411/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1599e-04 - val_loss: 8.9700e-04
Epoch 412/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6083e-04 - val_loss: 6.5332e-04
Epoch 413/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8962e-04 - val_loss: 7.5702e-04
Epoch 414/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2824e-04 - val_loss: 6.5426e-04
Epoch 415/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2004e-04 - val_loss: 6.9254e-04
Epoch 416/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2813e-04 - val_loss: 6.9904e-04
Epoch 417/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.7007e-04 - val_loss: 7.4287e-04
Epoch 418/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2375e-04 - val_loss: 7.0177e-04
Epoch 419/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4381e-04 - val_loss: 7.0398e-04
Epoch 420/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4287e-04 - val_loss: 7.8999e-04
Epoch 421/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5688e-04 - val_loss: 8.3132e-04
Epoch 422/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.8347e-04 - val_loss: 6.9192e-04
Epoch 423/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0804e-04 - val_loss: 7.4051e-04
Epoch 424/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3626e-04 - val_loss: 7.7648e-04
Epoch 425/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3772e-04 - val_loss: 6.7477e-04
Epoch 426/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9948e-04 - val_loss: 7.3985e-04
Epoch 427/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.5829e-04 - val_loss: 7.6164e-04
Epoch 428/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1800e-04 - val_loss: 7.3426e-04
Epoch 429/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3849e-04 - val_loss: 7.6020e-04
Epoch 430/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3597e-04 - val_loss: 7.4912e-04
Epoch 431/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0958e-04 - val_loss: 9.5149e-04
Epoch 432/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3778e-04 - val_loss: 7.2315e-04
Epoch 433/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1215e-04 - val_loss: 7.4594e-04
Epoch 434/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4454e-04 - val_loss: 6.7822e-04
Epoch 435/500
1074/1074 [==============================] - 3s 2ms/step - loss: 7.3669e-04 - val_loss: 7.0546e-04
Epoch 436/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1614e-04 - val_loss: 9.1135e-04
Epoch 437/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4278e-04 - val_loss: 7.3078e-04
Epoch 438/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4955e-04 - val_loss: 6.2530e-04
Epoch 439/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1470e-04 - val_loss: 6.0730e-04
Epoch 440/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2363e-04 - val_loss: 6.7716e-04
Epoch 441/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2906e-04 - val_loss: 6.1134e-04
Epoch 442/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6280e-04 - val_loss: 6.1292e-04
Epoch 443/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6237e-04 - val_loss: 6.2758e-04
Epoch 444/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9029e-04 - val_loss: 6.2245e-04
Epoch 445/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1840e-04 - val_loss: 6.7508e-04
Epoch 446/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3443e-04 - val_loss: 7.6727e-04
Epoch 447/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0535e-04 - val_loss: 6.7840e-04
Epoch 448/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3846e-04 - val_loss: 6.5824e-04
Epoch 449/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2362e-04 - val_loss: 6.7191e-04
Epoch 450/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4651e-04 - val_loss: 6.8701e-04
Epoch 451/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9898e-04 - val_loss: 7.8115e-04
Epoch 452/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0853e-04 - val_loss: 6.4693e-04
Epoch 453/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3402e-04 - val_loss: 7.3387e-04
Epoch 454/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3789e-04 - val_loss: 6.1696e-04
Epoch 455/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9203e-04 - val_loss: 0.0011
Epoch 456/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2395e-04 - val_loss: 7.2126e-04
Epoch 457/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0784e-04 - val_loss: 6.5539e-04
Epoch 458/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1060e-04 - val_loss: 6.0060e-04
Epoch 459/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2442e-04 - val_loss: 7.6873e-04
Epoch 460/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0754e-04 - val_loss: 6.6896e-04
Epoch 461/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2781e-04 - val_loss: 0.0013
Epoch 462/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3568e-04 - val_loss: 7.8933e-04
Epoch 463/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0385e-04 - val_loss: 0.0010
Epoch 464/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9621e-04 - val_loss: 6.5420e-04
Epoch 465/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1551e-04 - val_loss: 6.7945e-04
Epoch 466/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0950e-04 - val_loss: 6.3985e-04
Epoch 467/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0030e-04 - val_loss: 6.9102e-04
Epoch 468/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1023e-04 - val_loss: 7.7525e-04
Epoch 469/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.2448e-04 - val_loss: 7.0344e-04
Epoch 470/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0491e-04 - val_loss: 6.9779e-04
Epoch 471/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0560e-04 - val_loss: 8.6660e-04
Epoch 472/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9088e-04 - val_loss: 8.6717e-04
Epoch 473/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4737e-04 - val_loss: 6.8766e-04
Epoch 474/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9971e-04 - val_loss: 6.9985e-04
Epoch 475/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9646e-04 - val_loss: 6.0723e-04
Epoch 476/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8140e-04 - val_loss: 8.5252e-04
Epoch 477/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.4918e-04 - val_loss: 8.7819e-04
Epoch 478/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8811e-04 - val_loss: 9.1343e-04
Epoch 479/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.6116e-04 - val_loss: 7.3068e-04
Epoch 480/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7692e-04 - val_loss: 7.4562e-04
Epoch 481/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8532e-04 - val_loss: 6.9433e-04
Epoch 482/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0545e-04 - val_loss: 6.4266e-04
Epoch 483/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1912e-04 - val_loss: 7.4271e-04
Epoch 484/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1194e-04 - val_loss: 7.1140e-04
Epoch 485/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8370e-04 - val_loss: 6.5759e-04
Epoch 486/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3059e-04 - val_loss: 6.8802e-04
Epoch 487/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8188e-04 - val_loss: 6.4646e-04
Epoch 488/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8893e-04 - val_loss: 6.7801e-04
Epoch 489/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7535e-04 - val_loss: 6.7374e-04
Epoch 490/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1107e-04 - val_loss: 7.6883e-04
Epoch 491/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.7781e-04 - val_loss: 7.0576e-04
Epoch 492/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8912e-04 - val_loss: 6.9028e-04
Epoch 493/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.3377e-04 - val_loss: 6.6660e-04
Epoch 494/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.1903e-04 - val_loss: 7.9158e-04
Epoch 495/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6500e-04 - val_loss: 7.0753e-04
Epoch 496/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.6707e-04 - val_loss: 7.1748e-04
Epoch 497/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.9583e-04 - val_loss: 8.6439e-04
Epoch 498/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8438e-04 - val_loss: 8.4890e-04
Epoch 499/500
1074/1074 [==============================] - 2s 2ms/step - loss: 7.0220e-04 - val_loss: 7.5822e-04
Epoch 500/500
1074/1074 [==============================] - 2s 2ms/step - loss: 6.8681e-04 - val_loss: 7.0124e-04
Model: "sequential_2"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense_12 (Dense) (None, 100) 8500
dropout_8 (Dropout) (None, 100) 0
dense_13 (Dense) (None, 100) 10100
dropout_9 (Dropout) (None, 100) 0
dense_14 (Dense) (None, 100) 10100
dropout_10 (Dropout) (None, 100) 0
dense_15 (Dense) (None, 100) 10100
dropout_11 (Dropout) (None, 100) 0
dense_16 (Dense) (None, 100) 10100
dense_17 (Dense) (None, 6) 606
=================================================================
Total params: 49,506
Trainable params: 49,506
Non-trainable params: 0
_________________________________________________________________
CPU times: user 31min 13s, sys: 14min 4s, total: 45min 18s
Wall time: 17min 34s
# Persist the trained DNN, plot its training curve, and record test-set results.
model_filename = "{}_{}.h5".format(dnn_model_84_tag, timestamp)
dnn_model_84.save(output_dir / model_filename)

loss_plot_filename = 'loss_{}.pdf'.format(dnn_model_84_tag)
plot_loss(history_dnn_84, loss_plot_filename, 'DNN model (predict 6 forces from 84 input features)')

# save model loss on test set for evaluation section below
test_results['dnn_84'] = dnn_model_84.evaluate(X_test, Y_test, verbose=0)

# Scatter predicted vs. true values for each of the six force components.
Y_test_pred_dnn_84 = dnn_model_84.predict(X_test)
force_labels = ['$f_{x_1}$', '$f_{y_1}$', '$f_{z_1}$', '$f_{x_2}$', '$f_{y_2}$', '$f_{z_2}$']
plot_pred_vs_true(Y_test_pred_dnn_84, Y_test, 'pred_vs_true_{}'.format(dnn_model_84_tag), titles=force_labels)
I experimented with LSTM, GRU, and SimpleRNN layers. Performance was relatively similar, but the LSTM seemed to do slightly better.
As is shown in section 5, an RNN did a significantly better job predicting unseen data than a DNN, especially for the dataset not seen during training.
def setup_rnn_model(n_outputs, units=100, dropout_rate=0.05, learning_rate=1e-3, decay=5e-6):
    """Build and compile a two-layer LSTM regression model.

    Parameters
    ----------
    n_outputs : int
        Number of regression targets (force components) to predict.
    units : int, optional
        Width of each recurrent layer and of the dense hidden layer.
        Default 100, matching the original configuration.
    dropout_rate : float, optional
        Dropout fraction applied after each recurrent layer (default 0.05).
    learning_rate : float, optional
        Adam learning rate (default 1e-3).
    decay : float, optional
        Adam learning-rate decay (default 5e-6).

    Returns
    -------
    keras.Sequential
        Compiled model with mean-squared-error loss.
    """
    # SimpleRNN and GRU cells were also tried in place of LSTM with similar
    # performance; LSTM did slightly better (see notes above).
    # NOTE(review): 'relu' (instead of the default tanh) in the LSTM layers
    # disables the cuDNN fast path — kept because it matches the original
    # experiments; confirm it is intentional.
    model = keras.Sequential([
        layers.LSTM(units, activation='relu', return_sequences=True),
        layers.Dropout(dropout_rate),
        # Last recurrent layer collapses the sequence to a single vector.
        layers.LSTM(units, activation='relu', return_sequences=False),
        layers.Dropout(dropout_rate),
        layers.Dense(units, activation='relu'),
        layers.Dense(n_outputs),
    ])
    model.compile(loss='mean_squared_error',
                  optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate, decay=decay))
    return model
# rnn config
# NOTE: tag corrected to describe the architecture setup_rnn_model actually
# builds — two LSTM(100) layers, one Dense(100), dropout 0.05 — the previous
# tag ("lstm200x2 ... 0p5drop") mislabeled the saved model files.
rnn_tag = "rnn_lstm100x2_dense100x1_0p05drop"
rnn_epochs = 200
rnn_batch_size = 32

# One output per force component in the target sequences.
rnn_model_84 = setup_rnn_model(Y_seq_train.shape[-1])
rnn_model_84_tag = "{}_84features".format(rnn_tag)
%%time
# Train the RNN on the sequence data; %%time reports the wall time of the cell.
# early_stop is defined but currently disabled (its callbacks entry is
# commented out below), so training always runs the full rnn_epochs.
early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=20)
# Checkpoint after every epoch so a crash mid-run does not lose the model.
# NOTE(review): without save_best_only=True this keeps the *latest* epoch's
# weights, not the best-val_loss epoch — confirm that is intended.
save_every_epoch = tf.keras.callbacks.ModelCheckpoint(output_dir/'rnn_84_tmp.h5', monitor='val_loss', save_freq='epoch')
history_rnn_84 = rnn_model_84.fit(
X_seq_train, Y_seq_train,
validation_data=(X_seq_val, Y_seq_val),
batch_size = rnn_batch_size,
epochs=rnn_epochs,
callbacks=[save_every_epoch]
#callbacks=[early_stop, save_every_epoch]
#verbose=0,
)
rnn_model_84.summary()
# Pickle the per-epoch loss history so plots can be regenerated without retraining.
with open(output_dir/'history_rnn_84.pickle', 'wb') as f:
pickle.dump(history_rnn_84.history, f)
Epoch 1/200
1074/1074 [==============================] - 20s 17ms/step - loss: 0.0173 - val_loss: 0.0113
Epoch 2/200
1074/1074 [==============================] - 18s 16ms/step - loss: 0.0083 - val_loss: 0.0056
Epoch 3/200
1074/1074 [==============================] - 19s 17ms/step - loss: 0.0061 - val_loss: 0.0073
Epoch 4/200
1074/1074 [==============================] - 18s 17ms/step - loss: 0.0048 - val_loss: 0.0030
Epoch 5/200
1074/1074 [==============================] - 17s 16ms/step - loss: 0.0042 - val_loss: 0.0029
Epoch 6/200
1074/1074 [==============================] - 18s 17ms/step - loss: 0.0034 - val_loss: 0.0023
Epoch 7/200
1074/1074 [==============================] - 18s 17ms/step - loss: 0.0029 - val_loss: 0.0033
Epoch 8/200
1074/1074 [==============================] - 18s 16ms/step - loss: 0.0029 - val_loss: 0.0027
Epoch 9/200
1074/1074 [==============================] - 18s 16ms/step - loss: 0.0024 - val_loss: 0.0038
Epoch 10/200
1074/1074 [==============================] - 19s 17ms/step - loss: 0.0026 - val_loss: 0.0016
Epoch 11/200
1074/1074 [==============================] - 17s 16ms/step - loss: 0.0022 - val_loss: 0.0021
Epoch 12/200
1074/1074 [==============================] - 18s 17ms/step - loss: 0.0021 - val_loss: 0.0017
Epoch 13/200
1074/1074 [==============================] - 19s 17ms/step - loss: 0.0023 - val_loss: 0.0014
Epoch 14/200
1074/1074 [==============================] - 23s 21ms/step - loss: 0.0017 - val_loss: 0.0027
Epoch 15/200
1074/1074 [==============================] - 25s 23ms/step - loss: 0.0018 - val_loss: 0.0013
Epoch 16/200
1074/1074 [==============================] - 23s 22ms/step - loss: 0.0016 - val_loss: 0.0042
Epoch 17/200
1074/1074 [==============================] - 24s 22ms/step - loss: 0.0016 - val_loss: 0.0022
Epoch 18/200
1074/1074 [==============================] - 24s 22ms/step - loss: 0.0016 - val_loss: 0.0015
Epoch 19/200
1074/1074 [==============================] - 23s 22ms/step - loss: 0.0015 - val_loss: 0.0016
Epoch 20/200
1074/1074 [==============================] - 24s 22ms/step - loss: 0.0015 - val_loss: 0.0020
Epoch 21/200
1074/1074 [==============================] - 24s 22ms/step - loss: 0.0015 - val_loss: 0.0012
Epoch 22/200
1074/1074 [==============================] - 24s 22ms/step - loss: 0.0014 - val_loss: 0.0010
Epoch 23/200
1074/1074 [==============================] - 24s 22ms/step - loss: 0.0013 - val_loss: 0.0014
Epoch 24/200
1074/1074 [==============================] - 23s 21ms/step - loss: 0.0014 - val_loss: 0.0013
Epoch 25/200
1074/1074 [==============================] - 22s 20ms/step - loss: 0.0013 - val_loss: 0.0011
Epoch 26/200
1074/1074 [==============================] - 22s 21ms/step - loss: 0.0012 - val_loss: 8.6852e-04
Epoch 27/200
1074/1074 [==============================] - 22s 20ms/step - loss: 0.0012 - val_loss: 8.7304e-04
Epoch 28/200
1074/1074 [==============================] - 22s 21ms/step - loss: 0.0012 - val_loss: 8.4956e-04
Epoch 29/200
1074/1074 [==============================] - 22s 21ms/step - loss: 0.0011 - val_loss: 8.9253e-04
Epoch 30/200
1074/1074 [==============================] - 22s 20ms/step - loss: 0.0012 - val_loss: 0.0014
Epoch 31/200
1074/1074 [==============================] - 23s 21ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 32/200
1074/1074 [==============================] - 23s 21ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 33/200
1074/1074 [==============================] - 22s 21ms/step - loss: 0.0011 - val_loss: 9.7054e-04
Epoch 34/200
1074/1074 [==============================] - 23s 22ms/step - loss: 0.0011 - val_loss: 0.0011
Epoch 35/200
1074/1074 [==============================] - 24s 22ms/step - loss: 0.0010 - val_loss: 8.9636e-04
Epoch 36/200
1074/1074 [==============================] - 24s 22ms/step - loss: 0.0011 - val_loss: 7.5665e-04
Epoch 37/200
1074/1074 [==============================] - 23s 22ms/step - loss: 0.0010 - val_loss: 7.3449e-04
Epoch 38/200
1074/1074 [==============================] - 22s 21ms/step - loss: 9.7133e-04 - val_loss: 0.0010
Epoch 39/200
1074/1074 [==============================] - 23s 22ms/step - loss: 9.2998e-04 - val_loss: 9.5111e-04
Epoch 40/200
1074/1074 [==============================] - 23s 22ms/step - loss: 0.0010 - val_loss: 0.0018
Epoch 41/200
1074/1074 [==============================] - 23s 21ms/step - loss: 0.0010 - val_loss: 7.1462e-04
Epoch 42/200
1074/1074 [==============================] - 22s 21ms/step - loss: 9.3721e-04 - val_loss: 7.5925e-04
Epoch 43/200
1074/1074 [==============================] - 22s 21ms/step - loss: 9.3394e-04 - val_loss: 7.2891e-04
Epoch 44/200
1074/1074 [==============================] - 23s 21ms/step - loss: 9.1639e-04 - val_loss: 0.0011
Epoch 45/200
1074/1074 [==============================] - 24s 22ms/step - loss: 9.3805e-04 - val_loss: 0.0011
Epoch 46/200
1074/1074 [==============================] - 22s 21ms/step - loss: 8.7239e-04 - val_loss: 6.4246e-04
Epoch 47/200
1074/1074 [==============================] - 22s 20ms/step - loss: 8.4238e-04 - val_loss: 6.1866e-04
Epoch 48/200
1074/1074 [==============================] - 22s 21ms/step - loss: 8.3587e-04 - val_loss: 7.3110e-04
Epoch 49/200
1074/1074 [==============================] - 23s 22ms/step - loss: 8.4970e-04 - val_loss: 7.2496e-04
Epoch 50/200
1074/1074 [==============================] - 23s 22ms/step - loss: 8.0938e-04 - val_loss: 0.0021
Epoch 51/200
1074/1074 [==============================] - 23s 22ms/step - loss: 9.5248e-04 - val_loss: 5.9597e-04
Epoch 52/200
1074/1074 [==============================] - 24s 22ms/step - loss: 7.8316e-04 - val_loss: 6.1166e-04
Epoch 53/200
1074/1074 [==============================] - 24s 23ms/step - loss: 7.9006e-04 - val_loss: 9.3623e-04
Epoch 54/200
1074/1074 [==============================] - 21s 19ms/step - loss: 7.6945e-04 - val_loss: 6.0102e-04
Epoch 55/200
1074/1074 [==============================] - 21s 20ms/step - loss: 7.6665e-04 - val_loss: 6.0847e-04
Epoch 56/200
1074/1074 [==============================] - 21s 20ms/step - loss: 7.7292e-04 - val_loss: 5.8643e-04
Epoch 57/200
1074/1074 [==============================] - 21s 19ms/step - loss: 8.2555e-04 - val_loss: 9.2105e-04
Epoch 58/200
1074/1074 [==============================] - 21s 19ms/step - loss: 7.8854e-04 - val_loss: 5.5868e-04
Epoch 59/200
1074/1074 [==============================] - 21s 19ms/step - loss: 7.9790e-04 - val_loss: 5.5902e-04
Epoch 60/200
1074/1074 [==============================] - 21s 19ms/step - loss: 7.9076e-04 - val_loss: 7.1191e-04
Epoch 61/200
1074/1074 [==============================] - 20s 19ms/step - loss: 7.4309e-04 - val_loss: 6.6213e-04
Epoch 62/200
1074/1074 [==============================] - 24s 22ms/step - loss: 7.2834e-04 - val_loss: 7.2154e-04
Epoch 63/200
1074/1074 [==============================] - 20s 19ms/step - loss: 7.6878e-04 - val_loss: 7.3331e-04
Epoch 64/200
1074/1074 [==============================] - 20s 19ms/step - loss: 7.1345e-04 - val_loss: 5.4345e-04
Epoch 65/200
1074/1074 [==============================] - 21s 20ms/step - loss: 7.2506e-04 - val_loss: 6.1411e-04
Epoch 66/200
1074/1074 [==============================] - 21s 19ms/step - loss: 7.5283e-04 - val_loss: 8.5635e-04
Epoch 67/200
1074/1074 [==============================] - 21s 19ms/step - loss: 6.8359e-04 - val_loss: 5.2426e-04
Epoch 68/200
1074/1074 [==============================] - 20s 19ms/step - loss: 6.8344e-04 - val_loss: 5.6235e-04
Epoch 69/200
1074/1074 [==============================] - 18s 17ms/step - loss: 6.3535e-04 - val_loss: 6.1046e-04
Epoch 70/200
1074/1074 [==============================] - 22s 20ms/step - loss: 6.7331e-04 - val_loss: 5.0489e-04
Epoch 71/200
1074/1074 [==============================] - 24s 22ms/step - loss: 6.8715e-04 - val_loss: 0.0010
Epoch 72/200
1074/1074 [==============================] - 23s 21ms/step - loss: 6.4082e-04 - val_loss: 5.2093e-04
Epoch 73/200
1074/1074 [==============================] - 22s 20ms/step - loss: 6.5019e-04 - val_loss: 0.0013
Epoch 74/200
1074/1074 [==============================] - 20s 19ms/step - loss: 7.1931e-04 - val_loss: 6.0352e-04
Epoch 75/200
1074/1074 [==============================] - 21s 19ms/step - loss: 6.6624e-04 - val_loss: 5.1996e-04
Epoch 76/200
1074/1074 [==============================] - 21s 19ms/step - loss: 6.4913e-04 - val_loss: 6.5324e-04
Epoch 77/200
1074/1074 [==============================] - 21s 19ms/step - loss: 6.5657e-04 - val_loss: 5.2777e-04
Epoch 78/200
1074/1074 [==============================] - 21s 20ms/step - loss: 6.3671e-04 - val_loss: 5.7633e-04
Epoch 79/200
1074/1074 [==============================] - 20s 19ms/step - loss: 6.6045e-04 - val_loss: 5.1573e-04
Epoch 80/200
1074/1074 [==============================] - 20s 19ms/step - loss: 6.1654e-04 - val_loss: 0.0011
Epoch 81/200
1074/1074 [==============================] - 20s 19ms/step - loss: 6.1302e-04 - val_loss: 4.4432e-04
Epoch 82/200
1074/1074 [==============================] - 21s 19ms/step - loss: 6.4716e-04 - val_loss: 5.6473e-04
Epoch 83/200
1074/1074 [==============================] - 20s 19ms/step - loss: 6.0052e-04 - val_loss: 5.0470e-04
Epoch 84/200
1074/1074 [==============================] - 21s 19ms/step - loss: 6.7379e-04 - val_loss: 5.7961e-04
Epoch 85/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.7518e-04 - val_loss: 9.4791e-04
Epoch 86/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.9731e-04 - val_loss: 9.7181e-04
Epoch 87/200
1074/1074 [==============================] - 20s 19ms/step - loss: 6.1613e-04 - val_loss: 0.0013
Epoch 88/200
1074/1074 [==============================] - 21s 20ms/step - loss: 5.8203e-04 - val_loss: 6.2724e-04
Epoch 89/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.7950e-04 - val_loss: 6.2052e-04
Epoch 90/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.9223e-04 - val_loss: 7.1873e-04
Epoch 91/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.8441e-04 - val_loss: 5.9859e-04
Epoch 92/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.6209e-04 - val_loss: 8.7805e-04
Epoch 93/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.7741e-04 - val_loss: 4.5246e-04
Epoch 94/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.9354e-04 - val_loss: 4.6833e-04
Epoch 95/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.9536e-04 - val_loss: 5.2546e-04
Epoch 96/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.4501e-04 - val_loss: 7.5926e-04
Epoch 97/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.4974e-04 - val_loss: 4.3866e-04
Epoch 98/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.9776e-04 - val_loss: 4.2415e-04
Epoch 99/200
1074/1074 [==============================] - 21s 20ms/step - loss: 5.2774e-04 - val_loss: 4.2527e-04
Epoch 100/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.8031e-04 - val_loss: 0.0020
Epoch 101/200
1074/1074 [==============================] - 20s 18ms/step - loss: 5.6732e-04 - val_loss: 6.0306e-04
Epoch 102/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.2332e-04 - val_loss: 4.4763e-04
Epoch 103/200
1074/1074 [==============================] - 22s 20ms/step - loss: 5.5055e-04 - val_loss: 4.6692e-04
Epoch 104/200
1074/1074 [==============================] - 21s 20ms/step - loss: 5.4054e-04 - val_loss: 4.5075e-04
Epoch 105/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.7435e-04 - val_loss: 4.9084e-04
Epoch 106/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.5810e-04 - val_loss: 9.4552e-04
Epoch 107/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.6625e-04 - val_loss: 4.8375e-04
Epoch 108/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.0102e-04 - val_loss: 5.7859e-04
Epoch 109/200
1074/1074 [==============================] - 21s 20ms/step - loss: 5.1544e-04 - val_loss: 4.4714e-04
Epoch 110/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.1626e-04 - val_loss: 5.7263e-04
Epoch 111/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.1638e-04 - val_loss: 4.5909e-04
Epoch 112/200
1074/1074 [==============================] - 20s 19ms/step - loss: 5.1710e-04 - val_loss: 8.9958e-04
Epoch 113/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.0970e-04 - val_loss: 5.1271e-04
Epoch 114/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.2295e-04 - val_loss: 3.8889e-04
Epoch 115/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.9340e-04 - val_loss: 0.0010
Epoch 116/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.2764e-04 - val_loss: 4.5972e-04
Epoch 117/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.1409e-04 - val_loss: 5.0529e-04
Epoch 118/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.9007e-04 - val_loss: 3.7482e-04
Epoch 119/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.1477e-04 - val_loss: 5.0035e-04
Epoch 120/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.6944e-04 - val_loss: 5.0074e-04
Epoch 121/200
1074/1074 [==============================] - 22s 20ms/step - loss: 5.5427e-04 - val_loss: 5.6151e-04
Epoch 122/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.0312e-04 - val_loss: 5.6637e-04
Epoch 123/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.8618e-04 - val_loss: 3.9127e-04
Epoch 124/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.7500e-04 - val_loss: 4.9647e-04
Epoch 125/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.7931e-04 - val_loss: 5.4154e-04
Epoch 126/200
1074/1074 [==============================] - 21s 19ms/step - loss: 5.1182e-04 - val_loss: 4.5885e-04
Epoch 127/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.5749e-04 - val_loss: 3.6249e-04
Epoch 128/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.6603e-04 - val_loss: 0.0012
Epoch 129/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.6709e-04 - val_loss: 3.7903e-04
Epoch 130/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.7660e-04 - val_loss: 3.8530e-04
Epoch 131/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.7342e-04 - val_loss: 6.8236e-04
Epoch 132/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.5895e-04 - val_loss: 3.8387e-04
Epoch 133/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.6485e-04 - val_loss: 9.4629e-04
Epoch 134/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.6023e-04 - val_loss: 9.6124e-04
Epoch 135/200
1074/1074 [==============================] - 21s 20ms/step - loss: 4.6405e-04 - val_loss: 3.9247e-04
Epoch 136/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.5601e-04 - val_loss: 4.4690e-04
Epoch 137/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.6281e-04 - val_loss: 3.6494e-04
Epoch 138/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.5873e-04 - val_loss: 3.5702e-04
Epoch 139/200
1074/1074 [==============================] - 21s 20ms/step - loss: 4.6478e-04 - val_loss: 3.6494e-04
Epoch 140/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.5332e-04 - val_loss: 3.7507e-04
Epoch 141/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.4574e-04 - val_loss: 4.8202e-04
Epoch 142/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.4805e-04 - val_loss: 3.4560e-04
Epoch 143/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.3386e-04 - val_loss: 4.6262e-04
Epoch 144/200
1074/1074 [==============================] - 22s 20ms/step - loss: 4.6080e-04 - val_loss: 3.6450e-04
Epoch 145/200
1074/1074 [==============================] - 21s 20ms/step - loss: 4.5444e-04 - val_loss: 8.1503e-04
Epoch 146/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.4133e-04 - val_loss: 3.7965e-04
Epoch 147/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.5574e-04 - val_loss: 4.2946e-04
Epoch 148/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.3079e-04 - val_loss: 3.3870e-04
Epoch 149/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.3629e-04 - val_loss: 3.4971e-04
Epoch 150/200
1074/1074 [==============================] - 21s 20ms/step - loss: 4.3126e-04 - val_loss: 3.3756e-04
Epoch 151/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.5804e-04 - val_loss: 3.3979e-04
Epoch 152/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.2822e-04 - val_loss: 4.5183e-04
Epoch 153/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.1960e-04 - val_loss: 3.6316e-04
Epoch 154/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.3011e-04 - val_loss: 7.8224e-04
Epoch 155/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.3268e-04 - val_loss: 5.8956e-04
Epoch 156/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.3838e-04 - val_loss: 3.5009e-04
Epoch 157/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.1472e-04 - val_loss: 3.6038e-04
Epoch 158/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.9715e-04 - val_loss: 4.0928e-04
Epoch 159/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.4636e-04 - val_loss: 6.0987e-04
Epoch 160/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.0861e-04 - val_loss: 3.3452e-04
Epoch 161/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.0715e-04 - val_loss: 4.0337e-04
Epoch 162/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.3563e-04 - val_loss: 3.6770e-04
Epoch 163/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.2997e-04 - val_loss: 3.8437e-04
Epoch 164/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.0757e-04 - val_loss: 3.2869e-04
Epoch 165/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.9974e-04 - val_loss: 5.1447e-04
Epoch 166/200
1074/1074 [==============================] - 20s 19ms/step - loss: 4.1482e-04 - val_loss: 5.2801e-04
Epoch 167/200
1074/1074 [==============================] - 20s 19ms/step - loss: 3.9759e-04 - val_loss: 0.0011
Epoch 168/200
1074/1074 [==============================] - 20s 19ms/step - loss: 3.9068e-04 - val_loss: 3.3129e-04
Epoch 169/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.9815e-04 - val_loss: 3.4362e-04
Epoch 170/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.0172e-04 - val_loss: 4.1211e-04
Epoch 171/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.1674e-04 - val_loss: 3.4290e-04
Epoch 172/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.9755e-04 - val_loss: 3.3585e-04
Epoch 173/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.3243e-04 - val_loss: 4.0721e-04
Epoch 174/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.8343e-04 - val_loss: 3.2652e-04
Epoch 175/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.0519e-04 - val_loss: 4.9598e-04
Epoch 176/200
1074/1074 [==============================] - 21s 20ms/step - loss: 3.9217e-04 - val_loss: 3.6478e-04
Epoch 177/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.0509e-04 - val_loss: 4.9397e-04
Epoch 178/200
1074/1074 [==============================] - 20s 19ms/step - loss: 3.8602e-04 - val_loss: 3.3214e-04
Epoch 179/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.1872e-04 - val_loss: 3.4653e-04
Epoch 180/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.8401e-04 - val_loss: 3.8487e-04
Epoch 181/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.1259e-04 - val_loss: 3.5890e-04
Epoch 182/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.7830e-04 - val_loss: 3.0889e-04
Epoch 183/200
1074/1074 [==============================] - 20s 19ms/step - loss: 3.8761e-04 - val_loss: 3.2656e-04
Epoch 184/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.7110e-04 - val_loss: 3.2661e-04
Epoch 185/200
1074/1074 [==============================] - 21s 20ms/step - loss: 3.7700e-04 - val_loss: 3.1749e-04
Epoch 186/200
1074/1074 [==============================] - 21s 19ms/step - loss: 4.0791e-04 - val_loss: 3.9594e-04
Epoch 187/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.8130e-04 - val_loss: 4.0997e-04
Epoch 188/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.7577e-04 - val_loss: 3.4900e-04
Epoch 189/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.8130e-04 - val_loss: 3.1304e-04
Epoch 190/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.8231e-04 - val_loss: 3.5545e-04
Epoch 191/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.8593e-04 - val_loss: 3.8047e-04
Epoch 192/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.7814e-04 - val_loss: 3.3120e-04
Epoch 193/200
1074/1074 [==============================] - 21s 20ms/step - loss: 3.9067e-04 - val_loss: 4.3370e-04
Epoch 194/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.9221e-04 - val_loss: 3.3149e-04
Epoch 195/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.6699e-04 - val_loss: 3.0195e-04
Epoch 196/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.7210e-04 - val_loss: 3.2532e-04
Epoch 197/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.6616e-04 - val_loss: 3.2568e-04
Epoch 198/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.8068e-04 - val_loss: 3.1402e-04
Epoch 199/200
1074/1074 [==============================] - 21s 19ms/step - loss: 3.7403e-04 - val_loss: 5.4345e-04
Epoch 200/200
1074/1074 [==============================] - 20s 19ms/step - loss: 3.9937e-04 - val_loss: 3.4783e-04
Model: "sequential_3"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
lstm (LSTM) (None, 20, 100) 74000
dropout_12 (Dropout) (None, 20, 100) 0
lstm_1 (LSTM) (None, 100) 80400
dropout_13 (Dropout) (None, 100) 0
dense_18 (Dense) (None, 100) 10100
dense_19 (Dense) (None, 6) 606
=================================================================
Total params: 165,106
Trainable params: 165,106
Non-trainable params: 0
_________________________________________________________________
CPU times: user 3h 15min, sys: 50min 14s, total: 4h 5min 14s
Wall time: 1h 10min 1s
# Save the trained RNN model (HDF5; save_traces=False keeps the file small).
rnn_model_84.save(output_dir/"{}_{}.h5".format(rnn_model_84_tag, timestamp), save_traces=False)
# FIX: plot title previously read "DNN model" although this is the RNN model.
plot_loss(history_rnn_84, 'error_vs_epoch_{}.pdf'.format(rnn_model_84_tag), 'RNN model (predict 6 forces from 84 input features)')
# save model loss on test set for evaluation section below
test_results['rnn_84'] = rnn_model_84.evaluate(X_seq_test, Y_seq_test, verbose=0)
# Predicted vs. true scatter plots for the six force components.
Y_seq_test_pred = rnn_model_84.predict(X_seq_test)
plot_pred_vs_true(Y_seq_test_pred, Y_seq_test, 'pred_vs_true_{}'.format(rnn_model_84_tag), titles = ['$f_{x_1}$','$f_{y_1}$','$f_{z_1}$','$f_{x_2}$','$f_{y_2}$','$f_{z_2}$'])
# Summarize the loss each trained model achieved on the held-out test set.
print("loss on test sets:")
for model_name, loss in test_results.items():
    print(f"- {model_name}: {loss:.2e}")
loss on test sets: - linear_x1: 1.48e-02 - linear_12: 2.71e-02 - dnn_12: 1.01e-03 - dnn_36: 5.31e-04 - dnn_84: 6.43e-04 - rnn_84: 3.45e-04
Loss after 500 epochs for DNN and 200 epochs for RNN:
def create_separate_test_sets(df, features, outputs, n_steps=20, feature_idx=None, dnn_model=None, rnn_model=None):
    """Build evaluation arrays for one full dataset, plus optional model predictions.

    Parameters
    ----------
    df : pandas.DataFrame
        One complete test run (e.g. Test1/Test2/Test4).
    features, outputs : list of str
        Column names of the model inputs and the force targets.
    n_steps : int
        Window length used when slicing the data into sequences for the RNN.
    feature_idx : sequence of int, optional
        Column indices selecting a feature subset AFTER scaling
        (the scalers are fitted on the full feature set elsewhere).
    dnn_model, rnn_model : keras.Model, optional
        If given, their (normalized and inverse-transformed) predictions
        are added to the returned dict.

    Returns
    -------
    dict
        Raw and normalized inputs/targets, sequence versions, and —
        when models are supplied — their predictions.

    Notes
    -----
    Relies on the notebook globals ``scaler_x``, ``scaler_y`` and
    ``split_sequences`` defined in earlier cells.
    """
    # select relevant features and outputs
    X = df[features].to_numpy()
    Y = df[outputs].to_numpy()
    # apply scaling (scalers were fitted on the training data)
    X_normed = scaler_x.transform(X)
    Y_normed = scaler_y.transform(Y)
    # slice into overlapping sequences for the RNN
    X_seq, Y_seq = split_sequences(X_normed, Y_normed, n_steps)
    # select indices corresponding to desired feature subset.
    # FIX: explicit None check — the old truthiness test (`if feature_idx:`)
    # silently skips an empty selection and raises "truth value is ambiguous"
    # when a numpy index array is passed.
    if feature_idx is not None:
        X = X[:, feature_idx]
        X_normed = X_normed[:, feature_idx]
        X_seq = X_seq[:, :, feature_idx]
    # FIX: renamed the result dict (previously `outputs`, which shadowed
    # the `outputs` parameter and made the code confusing to read).
    results = {
        'X': X, 'Y': Y,
        'X_normed': X_normed, 'Y_normed': Y_normed,
        'X_seq_normed': X_seq, 'Y_seq_normed': Y_seq,
        'Y_seq': scaler_y.inverse_transform(Y_seq)
    }
    # calculate model predictions
    if dnn_model is not None:
        Y_pred_normed = dnn_model.predict(X_normed)
        results['Y_pred_normed'] = Y_pred_normed
        results['Y_pred'] = scaler_y.inverse_transform(Y_pred_normed)
    if rnn_model is not None:
        Y_seq_pred_normed = rnn_model.predict(X_seq)
        results['Y_seq_pred_normed'] = Y_seq_pred_normed
        results['Y_seq_pred'] = scaler_y.inverse_transform(Y_seq_pred_normed)
    return results
# Evaluation arrays + DNN-12 predictions on the 12-feature subset, per dataset.
tests_12 = {
    name: create_separate_test_sets(
        df, features_nth, outputs, n_steps, feature_idx=feature_idx,
        dnn_model=dnn_model_12, rnn_model=None)
    for name, df in zip(dataset_filenames, datasets)
}
# Evaluation arrays + DNN-36 predictions on the 36-feature subset, per dataset.
tests_36 = {
    name: create_separate_test_sets(
        df, features_nth, outputs, n_steps, feature_idx=feature_idx_2nd,
        dnn_model=dnn_model_36, rnn_model=None)
    for name, df in zip(dataset_filenames, datasets)
}
# Evaluation arrays + DNN-84 and RNN-84 predictions on all 84 features, per dataset.
tests_84 = {
    name: create_separate_test_sets(
        df, features_nth, outputs, n_steps, feature_idx=None,
        dnn_model=dnn_model_84, rnn_model=rnn_model_84)
    for name, df in zip(dataset_filenames, datasets)
}
# Evaluate every model on each complete dataset (not only the held-out split).
print("Loss on full Test1, Test2, Test4 datasets:")
for filename in dataset_filenames:
    # (model, data dict, input key, target key) — the RNN uses the sequence arrays.
    eval_specs = [
        (dnn_model_12, tests_12[filename], 'X_normed', 'Y_normed'),
        (dnn_model_36, tests_36[filename], 'X_normed', 'Y_normed'),
        (dnn_model_84, tests_84[filename], 'X_normed', 'Y_normed'),
        (rnn_model_84, tests_84[filename], 'X_seq_normed', 'Y_seq_normed'),
    ]
    losses = [model.evaluate(data[xk], data[yk], verbose=0)
              for model, data, xk, yk in eval_specs]
    print("- {}:\n DNN-12: {:.2e}\n DNN-36: {:.2e}\n DNN-84: {:.2e}\n RNN-84: {:.2e}".format(
        filename, *losses))
Loss on full Test1, Test2, Test4 datasets: - Test1: DNN-12: 7.20e-04 DNN-36: 3.48e-04 DNN-84: 3.98e-04 RNN-84: 2.29e-04 - Test2: DNN-12: 7.52e-02 DNN-36: 3.30e-02 DNN-84: 5.91e-02 RNN-84: 4.01e-03 - Test4: DNN-12: 1.08e-03 DNN-36: 5.66e-04 DNN-84: 7.10e-04 RNN-84: 3.24e-04
Observations:
# Settings for the prediction-error histograms below.
tmin = 0   # NOTE(review): tmin/tmax appear unused in this cell — confirm leftovers
tmax = -1
bins=50          # number of histogram bins
linewidth=3      # line width of the step histograms
# seaborn styling: larger fonts, default palette, white grid background
sns.set(font_scale = 2)
sns.color_palette()
sns.set_style("whitegrid")
# Per-dataset histograms of the normalized prediction residuals
# (truth - prediction) for each of the six force components,
# comparing all four models; one PDF per dataset.
for filename in dataset_filenames:
    # Residuals in normalized units. The sequence-based (RNN) residuals come
    # from the windowed arrays and therefore have fewer rows than the
    # pointwise ones, so they are kept separate.
    Y_err_12 = tests_12[filename]['Y_normed'] - tests_12[filename]['Y_pred_normed']
    Y_err_36 = tests_36[filename]['Y_normed'] - tests_36[filename]['Y_pred_normed']
    Y_err_84 = tests_84[filename]['Y_normed'] - tests_84[filename]['Y_pred_normed']
    Y_seq_err_84 = tests_84[filename]['Y_seq_normed'] - tests_84[filename]['Y_seq_pred_normed']
    fig = plt.figure(figsize=(28,16))
    fig.suptitle(filename, weight='bold').set_fontsize('24')
    for i in range(len(outputs)):
        # FIX: raw strings — "\s" is an invalid escape sequence in a plain
        # string literal and triggers a DeprecationWarning (SyntaxWarning in
        # newer Pythons). The rendered text is byte-identical.
        label_dnn12 = r"DNN-12: $\sigma={:.2f}$".format(np.std(Y_err_12[:,i]))
        label_dnn36 = r"DNN-36: $\sigma={:.2f}$".format(np.std(Y_err_36[:,i]))
        label_dnn84 = r"DNN-84: $\sigma={:.2f}$".format(np.std(Y_err_84[:,i]))
        label_rnn84 = r"RNN-84: $\sigma={:.2f}$".format(np.std(Y_seq_err_84[:,i]))
        ax = fig.add_subplot(2,3,i+1)
        # One step-histogram per model, all on a common range for comparability.
        for err, label in ((Y_err_12, label_dnn12), (Y_err_36, label_dnn36),
                           (Y_err_84, label_dnn84), (Y_seq_err_84, label_rnn84)):
            ax.hist(err[:,i], bins=bins, range=(-0.25, 0.25), alpha=0.8,
                    histtype='step', linewidth=linewidth, label=label)
        ax.set_xlabel('Error {}'.format(outputs[i]))
        ax.set_ylabel('# entries')
        ax.legend()
    plt.tight_layout()
    plt.savefig(output_dir/'{}_error.pdf'.format(filename))
Observations:
from sklearn.metrics import r2_score
# Per-output R2 scores of each model on every full dataset,
# printed as one tab-separated row per model.
print("R2 score on Test1, Test2, Test4 datasets:")
print("(" + " ".join(outputs) + ")")
for filename in dataset_filenames:
    # (row label, truth array, prediction array) per model.
    model_rows = [
        ('DNN-12:', tests_12[filename]['Y_normed'], tests_12[filename]['Y_pred_normed']),
        ('DNN-36:', tests_36[filename]['Y_normed'], tests_36[filename]['Y_pred_normed']),
        ('DNN-84:', tests_84[filename]['Y_normed'], tests_84[filename]['Y_pred_normed']),
        ('RNN-84:', tests_84[filename]['Y_seq_normed'], tests_84[filename]['Y_seq_pred_normed']),
    ]
    print("- {}:".format(filename))
    for label, y_true, y_pred in model_rows:
        scores = ["{:.2f}".format(r2_score(y_true[:, i], y_pred[:, i]))
                  for i in range(len(outputs))]
        print("\t" + "\t".join([label] + scores))
R2 score on Test1, Test2, Test4 datasets: (fx_1 fy_1 fz_1 fx_2 fy_2 fz_2) - Test1: DNN-12: 0.96 0.99 0.98 0.97 0.99 0.95 DNN-36: 0.99 1.00 0.99 0.99 1.00 0.96 DNN-84: 0.99 1.00 0.98 0.99 0.99 0.96 RNN-84: 0.99 1.00 0.99 0.99 1.00 0.98 - Test2: DNN-12: 0.51 -2.04 0.74 0.51 -2.03 -0.00 DNN-36: 0.88 -0.29 0.72 0.90 -0.33 0.21 DNN-84: 0.80 -1.43 0.76 0.80 -1.47 0.17 RNN-84: 0.89 0.94 0.81 0.92 0.96 0.37 - Test4: DNN-12: 0.94 0.99 0.98 0.96 0.98 0.86 DNN-36: 0.98 0.99 0.98 0.98 0.99 0.89 DNN-84: 0.98 0.99 0.98 0.97 0.99 0.87 RNN-84: 0.99 1.00 0.99 0.99 0.99 0.94
The coefficient of determination (R2) is the proportion of the variation in the dependent variable (here, the measured forces) that is predictable from the independent variables (here, the model predictions). The range is from negative infinity to +1, where +1 corresponds to a perfect fit.
Observations:
def pearson(x, y):
    """Return the Pearson correlation coefficient between two 1-D samples."""
    # np.corrcoef returns the 2x2 correlation matrix; the off-diagonal
    # entry is the correlation between x and y.
    return np.corrcoef(x, y)[0, 1]
# Per-output Pearson correlations between truth and prediction for each
# model on every full dataset, printed as one tab-separated row per model.
print("Pearson correlations for Test1, Test2, Test4 datasets:")
print("(" + " ".join(outputs) + ")")
for filename in dataset_filenames:
    # (row label, truth array, prediction array) per model.
    model_rows = [
        ('DNN-12:', tests_12[filename]['Y_normed'], tests_12[filename]['Y_pred_normed']),
        ('DNN-36:', tests_36[filename]['Y_normed'], tests_36[filename]['Y_pred_normed']),
        ('DNN-84:', tests_84[filename]['Y_normed'], tests_84[filename]['Y_pred_normed']),
        ('RNN-84:', tests_84[filename]['Y_seq_normed'], tests_84[filename]['Y_seq_pred_normed']),
    ]
    print("- {}:".format(filename))
    for label, y_true, y_pred in model_rows:
        values = ["{:.2f}".format(pearson(y_true[:, i], y_pred[:, i]))
                  for i in range(len(outputs))]
        print("\t" + "\t".join([label] + values))
Pearson correlations for Test1, Test2, Test4 datasets: (fx_1 fy_1 fz_1 fx_2 fy_2 fz_2) - Test1: DNN-12: 0.98 1.00 0.99 0.99 1.00 0.98 DNN-36: 1.00 1.00 0.99 1.00 1.00 0.98 DNN-84: 1.00 1.00 0.99 1.00 1.00 0.98 RNN-84: 1.00 1.00 1.00 1.00 1.00 0.99 - Test2: DNN-12: 0.74 -0.55 0.87 0.73 -0.58 0.36 DNN-36: 0.94 0.38 0.87 0.95 0.34 0.57 DNN-84: 0.90 -0.31 0.89 0.90 -0.34 0.50 RNN-84: 0.95 0.98 0.93 0.96 0.98 0.62 - Test4: DNN-12: 0.97 0.99 0.99 0.98 0.99 0.93 DNN-36: 0.99 1.00 0.99 0.99 0.99 0.95 DNN-84: 0.99 1.00 0.99 0.99 0.99 0.94 RNN-84: 1.00 1.00 1.00 0.99 1.00 0.97
The Pearson correlation coefficient ($r$) can range from -1 to +1.
Observations:
def plot_timeseries(tmin = 12000, tmax = 12500):
    """Plot measured vs. predicted forces over a window of sample indices.

    Draws one figure per dataset with six stacked subplots (one per force
    component), overlaying the measurement and the four model predictions,
    and saves each figure as a PDF in `output_dir`.

    Parameters
    ----------
    tmin, tmax : int
        Start/end sample indices of the window to plot, in the
        sequence-aligned frame (see offset note inside the loop).
    """
    # x-axis: tmax-tmin evenly spaced points between tmin and tmax
    t = np.linspace(tmin, tmax, tmax-tmin)
    linewidth = 3
    # seaborn styling: larger fonts, default palette, white grid background
    sns.set(font_scale = 2)
    sns.color_palette()
    sns.set_style("whitegrid")
    for filename in dataset_filenames:
        fig = plt.figure(figsize=(30,30))
        fig.suptitle(filename, weight='bold').set_fontsize('24')
        for i in range(6):  # one subplot per force component
            ax = fig.add_subplot(6, 1, i+1)
            # Pointwise arrays are shifted by n_steps-1 so they line up with
            # the RNN's sequence predictions, whose first output corresponds
            # to the end of the first n_steps-long window.
            ax.plot(t, tests_12[filename]['Y'].T[i][tmin+n_steps-1:tmax+n_steps-1], label='Data', linewidth=linewidth, color='k')
            ax.plot(t, tests_12[filename]['Y_pred'].T[i][tmin+n_steps-1:tmax+n_steps-1], label='DNN-12 pred.', linestyle='dashed', linewidth=linewidth-1)
            ax.plot(t, tests_36[filename]['Y_pred'].T[i][tmin+n_steps-1:tmax+n_steps-1], label='DNN-36 pred.', linestyle='dashed', linewidth=linewidth-1)
            ax.plot(t, tests_84[filename]['Y_pred'].T[i][tmin+n_steps-1:tmax+n_steps-1], label='DNN-84 pred.', linestyle='dashed', linewidth=linewidth)
            # Sequence predictions are already in the shifted frame; no offset.
            ax.plot(t, tests_84[filename]['Y_seq_pred'].T[i][tmin:tmax], label='RNN-84 pred.', linestyle='dashed', linewidth=linewidth)
            ax.set_xlabel('t')
            ax.set_ylabel(outputs[i])
            ax.legend(loc=2)
        plt.tight_layout()
        plt.savefig(output_dir/'{}_timeseries_t{}to{}.pdf'.format(filename, tmin, tmax))
# Inspect several 500-sample windows of each run, plus one long window.
plot_timeseries(tmin=0, tmax=500)
plot_timeseries(tmin=10000, tmax=10500)
plot_timeseries(tmin=15000, tmax=15500)
plot_timeseries(tmin=17000, tmax=17500)
plot_timeseries(tmin=0, tmax=15000)
Four neural networks (three DNNs and one RNN) were trained to predict tool-tip forces from input positions and angles (and their higher-order derivatives). The models were trained on 70% of the combined Test1 and Test4 datasets. Test2 was left out of the training entirely to evaluate the performance of the models on unseen runs of the robots.
The RNN model clearly outperformed the DNN models. This is especially clear from looking at the predicted tool-tip forces as a function of time for the Test2 dataset (unseen during training) above.
Adding the first-order (velocity) and second-order (acceleration) derivatives of the positions and angles as additional input features reduced the loss and significantly improved performance. Adding up to 6th order derivatives seemed helpful in some earlier DNN models (with fewer layers) that I trained (not shown in this notebook). However, the performance comparisons between DNN-84 and DNN-36 in this notebook indicate that the impact is marginal.
Although the RNN did a decent job, all models struggled in generalizing to Test2. A more comprehensive hyperparameter optimization could be done to improve the performance. Adding training data from more runs with robots would probably help a lot.
You would probably want to further optimize and tailor the model to the application. A deeper (and recurrent) model will likely perform better if the goal is to achieve optimal accuracy. However, if the goal is to run fast inference on resource-constrained hardware, you'd want to optimize a smaller model that is good enough for the job.